/* Copyright (C) 2009 Canonical Ltd
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

/* Must be defined before importing _static_tuple_c.h so that we get the right
 * linkage.
 */
#define STATIC_TUPLE_MODULE

#include <Python.h>
#include "python-compat.h"

#include "_static_tuple_c.h"
#include "_export_c_api.h"

/* Pyrex 0.9.6.4 exports _simple_set_pyx_api as
 * import__simple_set_pyx(), while Pyrex 0.9.8.5 and Cython 0.11.3 export them
 * as import_bzrlib___simple_set_pyx(). As such, we just #define one to be
 * equivalent to the other in our internal code.
 */
#define import__simple_set_pyx import_bzrlib___simple_set_pyx
#include "_simple_set_pyx_api.h"

#if defined(__GNUC__)
# define inline __inline__
#elif defined(_MSC_VER)
# define inline __inline
#else
# define inline
#endif

/* The one and only StaticTuple with no values */
static StaticTuple *_empty_tuple = NULL;
static PyObject *_interned_tuples = NULL;

static inline int
_StaticTuple_is_interned(StaticTuple *self)
{
    return self->flags & STATIC_TUPLE_INTERNED_FLAG;
}

StaticTuple_as_tuple(StaticTuple *self)
PyObject *tpl = NULL, *obj = NULL;
tpl = PyTuple_New(len);
for (i = 0; i < len; ++i) {
obj = (PyObject *)self->items[i];
PyTuple_SET_ITEM(tpl, i, obj);

static char StaticTuple_as_tuple_doc[] = "as_tuple() => tuple";

StaticTuple_Intern(StaticTuple *self)
PyObject *canonical_tuple = NULL;
if (_interned_tuples == NULL || _StaticTuple_is_interned(self)) {
/* SimpleSet_Add returns whatever object is present at self
 * or the new object if it needs to add it.
 */
canonical_tuple = SimpleSet_Add(_interned_tuples, (PyObject *)self);
if (!canonical_tuple) {
// Some sort of exception, propagate it.
if (canonical_tuple != (PyObject *)self) {
// There was already a tuple with that value
return (StaticTuple *)canonical_tuple;
self->flags |= STATIC_TUPLE_INTERNED_FLAG;
// The two references in the dict do not count, so that the StaticTuple
// object does not become immortal just because it was interned.
Py_REFCNT(self) -= 1;

static char StaticTuple_Intern_doc[] = "intern() => unique StaticTuple\n"
"Return a 'canonical' StaticTuple object.\n"
"Similar to intern() for strings, this makes sure there\n"
"is only one StaticTuple object for a given value.\n"
" key = StaticTuple('foo', 'bar').intern()\n";

StaticTuple_dealloc(StaticTuple *self)
if (_StaticTuple_is_interned(self)) {
/* revive dead object temporarily for Discard */
if (SimpleSet_Discard(_interned_tuples, (PyObject*)self) != 1)
Py_FatalError("deletion of interned StaticTuple failed");
self->flags &= ~STATIC_TUPLE_INTERNED_FLAG;
for (i = 0; i < len; ++i) {
Py_XDECREF(self->items[i]);
Py_TYPE(self)->tp_free((PyObject *)self);

/* Similar to PyTuple_New() */
StaticTuple_New(Py_ssize_t size)
PyErr_BadInternalCall();
if (size == 0 && _empty_tuple != NULL) {
Py_INCREF(_empty_tuple);
/* Note that we use PyObject_NewVar because we want to allocate a variable
 * width entry. However we *aren't* truly a PyVarObject because we don't
 * use a long for ob_size. Instead we use a plain 'size' that is an int,
 * and will be overloaded with flags in the future.
 * As such we do the alloc, and then have to clean up anything it does
 */
stuple = PyObject_NewVar(StaticTuple, &StaticTuple_Type, size);
if (stuple == NULL) {
stuple->_unused0 = 0;
stuple->_unused1 = 0;
memset(stuple->items, 0, sizeof(PyObject *) * size);
#if STATIC_TUPLE_HAS_HASH

StaticTuple_FromSequence(PyObject *sequence)
if (StaticTuple_CheckExact(sequence)) {
return (StaticTuple *)sequence;
if (!PySequence_Check(sequence)) {
PyErr_Format(PyExc_TypeError, "Type %s is not a sequence type",
Py_TYPE(sequence)->tp_name);
size = PySequence_Size(sequence);
new = StaticTuple_New(size);
for (i = 0; i < size; ++i) {
// This returns a new reference, which we then 'steal' with
// StaticTuple_SET_ITEM
item = PySequence_GetItem(sequence, i);
StaticTuple_SET_ITEM(new, i, item);
return (StaticTuple *)new;
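
/* Illustrative sketch only (not part of the original module): a typical use
 * of the C API above. 'st_make_pair' is a hypothetical helper name; it
 * assumes StaticTuple_New() hands back a zeroed StaticTuple and that
 * StaticTuple_SET_ITEM() steals a reference, hence the Py_INCREF calls. */
static StaticTuple *
st_make_pair(PyObject *a, PyObject *b)
{
    StaticTuple *st = StaticTuple_New(2);
    if (st == NULL)
        return NULL;    /* allocation failed, exception already set */
    Py_INCREF(a);
    StaticTuple_SET_ITEM(st, 0, a);
    Py_INCREF(b);
    StaticTuple_SET_ITEM(st, 1, b);
    return st;
}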

StaticTuple_from_sequence(PyObject *self, PyObject *args, PyObject *kwargs)
if (!PyArg_ParseTuple(args, "O", &sequence))
return StaticTuple_FromSequence(sequence);

StaticTuple_new_constructor(PyTypeObject *type, PyObject *args, PyObject *kwds)
PyObject *obj = NULL;
Py_ssize_t i, len = 0;
if (type != &StaticTuple_Type) {
PyErr_SetString(PyExc_TypeError, "we only support creating StaticTuple");
if (!PyTuple_CheckExact(args)) {
PyErr_SetString(PyExc_TypeError, "args must be a tuple");
len = PyTuple_GET_SIZE(args);
if (len < 0 || len > 255) {
/* Too big or too small */
PyErr_SetString(PyExc_ValueError, "StaticTuple.__init__(...)"
" takes from 0 to 255 items");
self = (StaticTuple *)StaticTuple_New(len);
for (i = 0; i < len; ++i) {
obj = PyTuple_GET_ITEM(args, i);
if (!PyString_CheckExact(obj)) {
if (!StaticTuple_CheckExact(obj)) {
PyErr_SetString(PyExc_TypeError, "StaticTuple.__init__(...)"
" requires that all items are strings or StaticTuple.");
type->tp_dealloc((PyObject *)self);
self->items[i] = obj;
return (PyObject *)self;
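
/* For illustration (hedged): the type restriction enforced above means that
 * nesting StaticTuples is fine but, for example, integer items are rejected:
 *
 *     StaticTuple('a', StaticTuple('b', 'c'))   # ok: str and StaticTuple items
 *     StaticTuple(1, 2)                         # raises TypeError
 */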

StaticTuple_repr(StaticTuple *self)
PyObject *as_tuple, *tuple_repr, *result;
as_tuple = StaticTuple_as_tuple(self);
if (as_tuple == NULL) {
tuple_repr = PyObject_Repr(as_tuple);
if (tuple_repr == NULL) {
result = PyString_FromFormat("%s%s", Py_TYPE(self)->tp_name,
PyString_AsString(tuple_repr));

StaticTuple_hash(StaticTuple *self)
/* adapted from tuplehash(), is the specific hash value considered
Py_ssize_t len = self->size;
long mult = 1000003L;
#if STATIC_TUPLE_HAS_HASH
if (self->hash != -1) {
// TODO: We could set specific flags if we know that, for example, all the
// items are strings. I haven't seen a real-world benefit to that
y = PyObject_Hash(*p++);
if (y == -1) /* failure */
/* the cast might truncate len; that doesn't change hash stability */
mult += (long)(82520L + len + len);
#if STATIC_TUPLE_HAS_HASH
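
/* Hedged sketch of the tuplehash()-style recurrence the function above adapts
 * (constants as in CPython 2.x tupleobject.c; 'p' walks self->items):
 *
 *     x = 0x345678L;
 *     while (--len >= 0) {
 *         y = PyObject_Hash(*p++);
 *         if (y == -1)
 *             return -1;
 *         x = (x ^ y) * mult;
 *         mult += (long)(82520L + len + len);
 *     }
 *     x += 97531L;
 *     if (x == -1)
 *         x = -2;
 */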

StaticTuple_richcompare_to_tuple(StaticTuple *v, PyObject *wt, int op)
PyObject *result = NULL;
vt = StaticTuple_as_tuple((StaticTuple *)v);
if (!PyTuple_Check(wt)) {
PyErr_BadInternalCall();
/* Now we have 2 tuples to compare, do it */
result = PyTuple_Type.tp_richcompare(vt, wt, op);

/** Compare two objects to determine if they are equivalent.
 * The basic flow is as follows:
 * 1) First make sure that both objects are StaticTuple instances. If they
 *    aren't, then cast self to a tuple and have the tuple do the comparison.
 * 2) Special case comparison to Py_None, because it happens to occur fairly
 *    often in the test suite.
 * 3) Special case when v and w are the same pointer, as we then know the
 *    answer to all queries without walking individual items.
 * 4) For all operations, we then walk the items to find the first paired
 *    items that are not equal.
 * 5) If all items found are equal, we then check the length of self and
 *    other to determine equality.
 * 6) If an item differs, we apply "op" to that differing pair of items (e.g.
 *    StaticTuple(A, B) > StaticTuple(A, C) iff B > C).
 */
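
/* For illustration (hedged sketch; the values are hypothetical), the
 * Python-level semantics the steps above aim for:
 *
 *     StaticTuple('a', 'b') == StaticTuple('a', 'b')   # True, item-wise
 *     StaticTuple('a', 'b') == ('a', 'b')              # True, via tuple compare
 *     StaticTuple('a') < StaticTuple('a', 'b')         # True, equal prefix, shorter
 *     StaticTuple('a', 'b') > StaticTuple('a', 'c')    # False, decided by 'b' > 'c'
 */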

StaticTuple_richcompare(PyObject *v, PyObject *w, int op)
StaticTuple *v_st, *w_st;
Py_ssize_t vlen, wlen, min_len, i;
PyObject *v_obj, *w_obj;
richcmpfunc string_richcompare;
if (!StaticTuple_CheckExact(v)) {
/* This has never triggered. According to python-dev it might trigger
 * when '__op__' is defined but '__rop__' is not, for a case such as
 * "None == StaticTuple()".
 */
fprintf(stderr, "self is not StaticTuple\n");
Py_INCREF(Py_NotImplemented);
return Py_NotImplemented;
v_st = (StaticTuple *)v;
if (StaticTuple_CheckExact(w)) {
/* The most common case */
w_st = (StaticTuple*)w;
} else if (PyTuple_Check(w)) {
/* One of v or w is a tuple, so we go the 'slow' route and cast up to
 */
/* TODO: This seems to be triggering more than I thought it would...
 * We probably want to optimize comparing self to other when
 */
return StaticTuple_richcompare_to_tuple(v_st, w, op);
} else if (w == Py_None) {
// None is always less than the object
case Py_NE:case Py_GT:case Py_GE:
case Py_EQ:case Py_LT:case Py_LE:
default: // Should never happen
return Py_NotImplemented;
/* We don't special case this comparison, we just let python handle
 * it.
 */
Py_INCREF(Py_NotImplemented);
return Py_NotImplemented;
/* Now we know that we have 2 StaticTuple objects, so let's compare them.
 * This code is inspired by tuplerichcompare, except we know our
 * objects are limited in scope, so we can inline some comparisons.
 */
/* Identical pointers, we can shortcut this easily. */
case Py_EQ:case Py_LE:case Py_GE:
case Py_NE:case Py_LT:case Py_GT:
&& _StaticTuple_is_interned(v_st)
&& _StaticTuple_is_interned(w_st))
/* If both objects are interned, we know they are different if the
 * pointer is not the same, which would have been handled by the
 * previous if. No need to compare the entries.
 */
/* The only time we are likely to compare items of different lengths is in
 * something like the interned_keys set. However, the hash is good enough
 * that it is rare. Note that 'tuple_richcompare' also does not compare
 */
min_len = (vlen < wlen) ? vlen : wlen;
string_richcompare = PyString_Type.tp_richcompare;
for (i = 0; i < min_len; i++) {
PyObject *result = NULL;
v_obj = StaticTuple_GET_ITEM(v_st, i);
w_obj = StaticTuple_GET_ITEM(w_st, i);
if (v_obj == w_obj) {
/* Shortcut case, these must be identical */
if (PyString_CheckExact(v_obj) && PyString_CheckExact(w_obj)) {
result = string_richcompare(v_obj, w_obj, Py_EQ);
} else if (StaticTuple_CheckExact(v_obj) &&
StaticTuple_CheckExact(w_obj))
/* Both are StaticTuple types, so recurse */
result = StaticTuple_richcompare(v_obj, w_obj, Py_EQ);
/* Not the same type, obviously they won't compare equal */
if (result == NULL) {
return NULL; /* There seems to be an error */
if (result == Py_NotImplemented) {
/* One side must have had a string and the other a StaticTuple.
 * This clearly means that they are not equal.
 */
result = PyObject_RichCompare(v_obj, w_obj, Py_EQ);
if (result == Py_False) {
/* This entry is not identical */
if (result != Py_True) {
/* We don't know *what* richcompare is returning, but it
 * isn't something we recognize
 */
PyErr_BadInternalCall();
/* We walked off one of the lists, but everything compared equal so
 * far. Just compare the size.
 */
case Py_LT: cmp = vlen < wlen; break;
case Py_LE: cmp = vlen <= wlen; break;
case Py_EQ: cmp = vlen == wlen; break;
case Py_NE: cmp = vlen != wlen; break;
case Py_GT: cmp = vlen > wlen; break;
case Py_GE: cmp = vlen >= wlen; break;
default: return NULL; /* cannot happen */
/* The last item differs, shortcut the Py_NE case */
/* It is some other comparison, go ahead and do the real check. */
if (PyString_CheckExact(v_obj) && PyString_CheckExact(w_obj))
return string_richcompare(v_obj, w_obj, op);
} else if (StaticTuple_CheckExact(v_obj) &&
StaticTuple_CheckExact(w_obj))
/* Both are StaticTuple types, so recurse */
return StaticTuple_richcompare(v_obj, w_obj, op);
Py_INCREF(Py_NotImplemented);
return Py_NotImplemented;

StaticTuple_length(StaticTuple *self)

StaticTuple__is_interned(StaticTuple *self)
if (_StaticTuple_is_interned(self)) {

static char StaticTuple__is_interned_doc[] = "_is_interned() => True/False\n"
"Check to see if this tuple has been interned.\n";

StaticTuple_item(StaticTuple *self, Py_ssize_t offset)
/* We cast to (int) to avoid worrying about whether Py_ssize_t is a
 * long long, etc. offsets should never be >2**31 anyway.
 */
PyErr_Format(PyExc_IndexError, "StaticTuple_item does not support"
" negative indices: %d\n", (int)offset);
} else if (offset >= self->size) {
PyErr_Format(PyExc_IndexError, "StaticTuple index out of range"
" %d >= %d", (int)offset, (int)self->size);
obj = (PyObject *)self->items[offset];

StaticTuple_slice(StaticTuple *self, Py_ssize_t ilow, Py_ssize_t ihigh)
PyObject *as_tuple, *result;
as_tuple = StaticTuple_as_tuple(self);
if (as_tuple == NULL) {
result = PyTuple_Type.tp_as_sequence->sq_slice(as_tuple, ilow, ihigh);
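
/* For illustration (hedged, Python 2 slicing): because sq_slice above
 * delegates to the tuple created by as_tuple(), slicing returns a plain
 * tuple rather than a StaticTuple:
 *
 *     StaticTuple('a', 'b', 'c')[0:2]   # == ('a', 'b'), a regular tuple
 */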

StaticTuple_traverse(StaticTuple *self, visitproc visit, void *arg)
for (i = self->size; --i >= 0;) {
Py_VISIT(self->items[i]);

static char StaticTuple_doc[] =
"C implementation of a StaticTuple structure."
"\n This is used as StaticTuple(item1, item2, item3)."
"\n This is similar to tuple, less flexible in what it"
"\n supports, but with lower memory consumption."
"\n Note that the constructor mimics the () form of tuples,"
"\n rather than the 'tuple()' constructor."
"\n e.g. StaticTuple(a, b) == (a, b) == tuple((a, b))";

static PyMethodDef StaticTuple_methods[] = {
{"as_tuple", (PyCFunction)StaticTuple_as_tuple, METH_NOARGS, StaticTuple_as_tuple_doc},
{"intern", (PyCFunction)StaticTuple_Intern, METH_NOARGS, StaticTuple_Intern_doc},
{"_is_interned", (PyCFunction)StaticTuple__is_interned, METH_NOARGS,
 StaticTuple__is_interned_doc},
{"from_sequence", (PyCFunction)StaticTuple_from_sequence,
 METH_STATIC | METH_VARARGS,
 "Create a StaticTuple from a given sequence. This functions"
 " the same as the tuple() constructor."},
{NULL, NULL} /* sentinel */
};
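
/* For illustration (hedged): the two construction paths exposed above treat
 * their arguments differently:
 *
 *     StaticTuple('a', 'b')                  # items passed positionally
 *     StaticTuple.from_sequence(['a', 'b'])  # any sequence, like tuple(seq)
 */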

static PySequenceMethods StaticTuple_as_sequence = {
(lenfunc)StaticTuple_length, /* sq_length */
(ssizeargfunc)StaticTuple_item, /* sq_item */
(ssizessizeargfunc)StaticTuple_slice, /* sq_slice */
0, /* sq_ass_slice */

/* TODO: Implement StaticTuple_as_mapping.
 * The only thing we really want to support from there is mp_subscript,
 * so that we could support extended slicing (foo[::2]). Not worth it
 */

PyTypeObject StaticTuple_Type = {
PyObject_HEAD_INIT(NULL)
"StaticTuple", /* tp_name */
sizeof(StaticTuple), /* tp_basicsize */
sizeof(PyObject *), /* tp_itemsize */
(destructor)StaticTuple_dealloc, /* tp_dealloc */
(reprfunc)StaticTuple_repr, /* tp_repr */
0, /* tp_as_number */
&StaticTuple_as_sequence, /* tp_as_sequence */
0, /* tp_as_mapping */
(hashfunc)StaticTuple_hash, /* tp_hash */
0, /* tp_as_buffer */
Py_TPFLAGS_DEFAULT, /* tp_flags */
StaticTuple_doc, /* tp_doc */
/* gc.get_referents checks the IS_GC flag before it calls tp_traverse,
 * and we don't include this object in the garbage collector because we
 * know it doesn't create cycles. However, 'meliae' will follow
 * tp_traverse, even if the object isn't GC, and we want that.
 */
(traverseproc)StaticTuple_traverse, /* tp_traverse */
StaticTuple_richcompare, /* tp_richcompare */
0, /* tp_weaklistoffset */
// Without implementing tp_iter, Python will fall back to PySequence*,
// which seems to work OK; we may need something faster/lighter in the
// future.
StaticTuple_methods, /* tp_methods */
0, /* tp_descr_get */
0, /* tp_descr_set */
0, /* tp_dictoffset */
StaticTuple_new_constructor, /* tp_new */

static PyMethodDef static_tuple_c_methods[] = {

setup_interned_tuples(PyObject *m)
_interned_tuples = (PyObject *)SimpleSet_New();
if (_interned_tuples != NULL) {
Py_INCREF(_interned_tuples);
PyModule_AddObject(m, "_interned_tuples", _interned_tuples);

setup_empty_tuple(PyObject *m)
if (_interned_tuples == NULL) {
fprintf(stderr, "You need to call setup_interned_tuples() before"
" setup_empty_tuple, because we intern it.\n");
// We need to create the empty tuple
stuple = (StaticTuple *)StaticTuple_New(0);
_empty_tuple = StaticTuple_Intern(stuple);
assert(_empty_tuple == stuple);
// At this point, refcnt is 2: 1 from New(), and 1 from the return from
// intern(). We will keep 1 for the _empty_tuple global, and use the other
// for the module reference.
PyModule_AddObject(m, "_empty_tuple", (PyObject *)_empty_tuple);

static int
_StaticTuple_CheckExact(PyObject *obj)
{
    return StaticTuple_CheckExact(obj);
}

setup_c_api(PyObject *m)
_export_function(m, "StaticTuple_New", StaticTuple_New,
"StaticTuple *(Py_ssize_t)");
_export_function(m, "StaticTuple_Intern", StaticTuple_Intern,
"StaticTuple *(StaticTuple *)");
_export_function(m, "StaticTuple_FromSequence", StaticTuple_FromSequence,
"StaticTuple *(PyObject *)");
_export_function(m, "_StaticTuple_CheckExact", _StaticTuple_CheckExact,

_workaround_pyrex_096(void)
/* Work around an incompatibility in how pyrex 0.9.6 exports a module,
 * versus how pyrex 0.9.8 and cython 0.11 export it.
 * Namely 0.9.6 exports import__simple_set_pyx and tries to
 * "import _simple_set_pyx" but it is available only as
 * "import bzrlib._simple_set_pyx"
 * It is a shame to hack up sys.modules, but that is what we've got to do.
 */
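/* Roughly the Python equivalent of the workaround performed below
 * (hedged sketch):
 *
 *     import sys
 *     import bzrlib._simple_set_pyx
 *     sys.modules['_simple_set_pyx'] = bzrlib._simple_set_pyx
 */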
PyObject *sys_module = NULL, *modules = NULL, *set_module = NULL;
/* Clear out the current ImportError exception, and try again. */
/* Note that this only seems to work if somewhere else imports
 * bzrlib._simple_set_pyx before importing bzrlib._static_tuple_c
 */
set_module = PyImport_ImportModule("bzrlib._simple_set_pyx");
if (set_module == NULL) {
// fprintf(stderr, "Failed to import bzrlib._simple_set_pyx\n");
/* Add the _simple_set_pyx into sys.modules at the appropriate location. */
sys_module = PyImport_ImportModule("sys");
if (sys_module == NULL) {
// fprintf(stderr, "Failed to import sys\n");
modules = PyObject_GetAttrString(sys_module, "modules");
if (modules == NULL || !PyDict_Check(modules)) {
// fprintf(stderr, "Failed to find sys.modules\n");
PyDict_SetItemString(modules, "_simple_set_pyx", set_module);
/* Now that we have hacked it in, try the import again. */
retval = import_bzrlib___simple_set_pyx();
Py_XDECREF(set_module);
Py_XDECREF(sys_module);

init_static_tuple_c(void)
StaticTuple_Type.tp_getattro = PyObject_GenericGetAttr;
if (PyType_Ready(&StaticTuple_Type) < 0)
m = Py_InitModule3("_static_tuple_c", static_tuple_c_methods,
"C implementation of a StaticTuple structure");
Py_INCREF(&StaticTuple_Type);
PyModule_AddObject(m, "StaticTuple", (PyObject *)&StaticTuple_Type);
if (import_bzrlib___simple_set_pyx() == -1
&& _workaround_pyrex_096() == -1)
setup_interned_tuples(m);
setup_empty_tuple(m);