/* Copyright (C) 2009, 2010 Canonical Ltd
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */
/* Must be defined before importing _static_tuple_c.h so that we get the right
 * linkage.
 */
#define STATIC_TUPLE_MODULE

#include "python-compat.h"

#include "_static_tuple_c.h"
#include "_export_c_api.h"
#include "_simple_set_pyx_api.h"

#if defined(__GNUC__)
# define inline __inline__
#elif defined(_MSC_VER)
# define inline __inline
#endif
/* The one and only StaticTuple with no values */
static StaticTuple *_empty_tuple = NULL;
static PyObject *_interned_tuples = NULL;

_StaticTuple_is_interned(StaticTuple *self)
return self->flags & STATIC_TUPLE_INTERNED_FLAG;
StaticTuple_as_tuple(StaticTuple *self)
PyObject *tpl = NULL, *obj = NULL;
tpl = PyTuple_New(len);
for (i = 0; i < len; ++i) {
obj = (PyObject *)self->items[i];
Py_INCREF(obj);
PyTuple_SET_ITEM(tpl, i, obj);

static char StaticTuple_as_tuple_doc[] = "as_tuple() => tuple";
StaticTuple_Intern(StaticTuple *self)
PyObject *canonical_tuple = NULL;
if (_interned_tuples == NULL || _StaticTuple_is_interned(self)) {
/* SimpleSet_Add returns whatever object is present at self
 * or the new object if it needs to add it.
 */
canonical_tuple = SimpleSet_Add(_interned_tuples, (PyObject *)self);
if (!canonical_tuple) {
// Some sort of exception, propagate it.
if (canonical_tuple != (PyObject *)self) {
// There was already a tuple with that value
return (StaticTuple *)canonical_tuple;
self->flags |= STATIC_TUPLE_INTERNED_FLAG;
// The two references in the set do not count, so that the StaticTuple
// object does not become immortal just because it was interned.
Py_REFCNT(self) -= 1;
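/* Note: the matching bookkeeping happens in StaticTuple_dealloc() below,
 * which temporarily revives the object so SimpleSet_Discard() can remove it
 * from _interned_tuples before the memory is freed. */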
static char StaticTuple_Intern_doc[] = "intern() => unique StaticTuple\n"
"Return a 'canonical' StaticTuple object.\n"
"Similar to intern() for strings, this makes sure there\n"
"is only one StaticTuple object for a given value.\n"
" key = StaticTuple('foo', 'bar').intern()\n";
StaticTuple_dealloc(StaticTuple *self)
if (_StaticTuple_is_interned(self)) {
/* revive dead object temporarily for Discard */
if (SimpleSet_Discard(_interned_tuples, (PyObject*)self) != 1)
Py_FatalError("deletion of interned StaticTuple failed");
self->flags &= ~STATIC_TUPLE_INTERNED_FLAG;
for (i = 0; i < len; ++i) {
Py_XDECREF(self->items[i]);
Py_TYPE(self)->tp_free((PyObject *)self);
/* Similar to PyTuple_New() */
StaticTuple_New(Py_ssize_t size)
if (size < 0 || size > 255) {
/* Too big or too small */
PyErr_SetString(PyExc_ValueError, "StaticTuple(...)"
" takes from 0 to 255 items");
if (size == 0 && _empty_tuple != NULL) {
Py_INCREF(_empty_tuple);
/* Note that we use PyObject_NewVar because we want to allocate a variable
 * width entry. However we *aren't* truly a PyVarObject because we don't
 * use a long for ob_size. Instead we use a plain 'size' that is an int,
 * and will be overloaded with flags in the future.
 * As such we do the alloc, and then have to clean up anything it does
 * incorrectly.
 */
stuple = PyObject_NewVar(StaticTuple, &StaticTuple_Type, size);
if (stuple == NULL) {
stuple->_unused0 = 0;
stuple->_unused1 = 0;
memset(stuple->items, 0, sizeof(PyObject *) * size);
#if STATIC_TUPLE_HAS_HASH
StaticTuple_FromSequence(PyObject *sequence)
StaticTuple *new = NULL;
PyObject *as_tuple = NULL;
if (StaticTuple_CheckExact(sequence)) {
return (StaticTuple *)sequence;
if (!PySequence_Check(sequence)) {
as_tuple = PySequence_Tuple(sequence);
if (as_tuple == NULL)
size = PySequence_Size(sequence);
new = StaticTuple_New(size);
for (i = 0; i < size; ++i) {
// This returns a new reference, which we then 'steal' with
// StaticTuple_SET_ITEM
item = PySequence_GetItem(sequence, i);
StaticTuple_SET_ITEM(new, i, item);
Py_XDECREF(as_tuple);
return (StaticTuple *)new;
StaticTuple_from_sequence(PyObject *self, PyObject *args, PyObject *kwargs)
if (!PyArg_ParseTuple(args, "O", &sequence))
return StaticTuple_FromSequence(sequence);

/* Check that all items we point to are 'valid' */
StaticTuple_check_items(StaticTuple *self)
for (i = 0; i < self->size; ++i) {
obj = self->items[i];
PyErr_SetString(PyExc_RuntimeError, "StaticTuple(...)"
" should not have a NULL entry.");
if (PyBytes_CheckExact(obj)
|| StaticTuple_CheckExact(obj)
#if PY_MAJOR_VERSION >= 3
#else
|| PyInt_CheckExact(obj)
#endif
|| PyLong_CheckExact(obj)
|| PyFloat_CheckExact(obj)
|| PyUnicode_CheckExact(obj)
PyErr_Format(PyExc_TypeError, "StaticTuple(...)"
" requires that all items are one of"
" str, StaticTuple, None, bool, int, long, float, or unicode,"
" not %s.", Py_TYPE(obj)->tp_name);
StaticTuple_new_constructor(PyTypeObject *type, PyObject *args, PyObject *kwds)
PyObject *obj = NULL;
Py_ssize_t i, len = 0;
if (type != &StaticTuple_Type) {
PyErr_SetString(PyExc_TypeError, "we only support creating StaticTuple");
if (!PyTuple_CheckExact(args)) {
PyErr_SetString(PyExc_TypeError, "args must be a tuple");
len = PyTuple_GET_SIZE(args);
if (len < 0 || len > 255) {
/* Check the length here so we can raise a TypeError instead of
 * StaticTuple_New's ValueError.
 */
PyErr_SetString(PyExc_TypeError, "StaticTuple(...)"
" takes from 0 to 255 items");
self = (StaticTuple *)StaticTuple_New(len);
for (i = 0; i < len; ++i) {
obj = PyTuple_GET_ITEM(args, i);
Py_INCREF(obj);
self->items[i] = obj;
if (!StaticTuple_check_items(self)) {
type->tp_dealloc((PyObject *)self);
return (PyObject *)self;
StaticTuple_repr(StaticTuple *self)
PyObject *as_tuple, *tuple_repr, *result;
as_tuple = StaticTuple_as_tuple(self);
if (as_tuple == NULL) {
tuple_repr = PyObject_Repr(as_tuple);
if (tuple_repr == NULL) {
#if PY_MAJOR_VERSION >= 3
result = PyUnicode_FromFormat("StaticTuple%U", tuple_repr);
#else
result = PyString_FromFormat("StaticTuple%s",
PyString_AsString(tuple_repr));
#endif
/* Adapted from tuplehash(); is the specific hash value considered
 * 'stable'?
 */
#if PY_MAJOR_VERSION > 3 || (PY_MAJOR_VERSION == 3 && PY_MINOR_VERSION >= 8)
/* Hash for tuples. This is a slightly simplified version of the xxHash
   non-cryptographic hash:
   - we do not use any parallelism, there is only 1 accumulator.
   - we drop the final mixing since this is just a permutation of the
     output space: it does not help against collisions.
   - at the end, we mangle the length with a single constant.
   For the xxHash specification, see
   https://github.com/Cyan4973/xxHash/blob/master/doc/xxhash_spec.md

   Below are the official constants from the xxHash specification. Optimizing
   compilers should emit a single "rotate" instruction for the
   _PyHASH_XXROTATE() expansion. If that doesn't happen for some important
   platform, the macro could be changed to expand to a platform-specific rotate
   spelling instead.
*/
#if SIZEOF_PY_UHASH_T > 4
#define _PyHASH_XXPRIME_1 ((Py_uhash_t)11400714785074694791ULL)
#define _PyHASH_XXPRIME_2 ((Py_uhash_t)14029467366897019727ULL)
#define _PyHASH_XXPRIME_5 ((Py_uhash_t)2870177450012600261ULL)
#define _PyHASH_XXROTATE(x) ((x << 31) | (x >> 33))  /* Rotate left 31 bits */
#else
#define _PyHASH_XXPRIME_1 ((Py_uhash_t)2654435761UL)
#define _PyHASH_XXPRIME_2 ((Py_uhash_t)2246822519UL)
#define _PyHASH_XXPRIME_5 ((Py_uhash_t)374761393UL)
#define _PyHASH_XXROTATE(x) ((x << 13) | (x >> 19))  /* Rotate left 13 bits */
#endif
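/* In effect, each item contributes to the accumulator as:
 *   acc = _PyHASH_XXROTATE(acc + lane * _PyHASH_XXPRIME_2) * _PyHASH_XXPRIME_1;
 * where 'lane' is the item's own hash (see StaticTuple_hash below). */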
/* Tests have shown that it's not worth caching the hash value, see
   https://bugs.python.org/issue9685 */
StaticTuple_hash(StaticTuple *self)
Py_ssize_t i, len = self->size;
PyObject **item = self->items;
#if STATIC_TUPLE_HAS_HASH
if (self->hash != -1) {
Py_uhash_t acc = _PyHASH_XXPRIME_5;
for (i = 0; i < len; i++) {
Py_uhash_t lane = PyObject_Hash(item[i]);
if (lane == (Py_uhash_t)-1) {
acc += lane * _PyHASH_XXPRIME_2;
acc = _PyHASH_XXROTATE(acc);
acc *= _PyHASH_XXPRIME_1;
/* Add input length, mangled to keep the historical value of hash(()). */
acc += len ^ (_PyHASH_XXPRIME_5 ^ 3527539UL);
if (acc == (Py_uhash_t)-1) {
#if STATIC_TUPLE_HAS_HASH

StaticTuple_hash(StaticTuple *self)
/* Adapted from tuplehash(); is the specific hash value considered
 * 'stable'?
 */
Py_ssize_t len = self->size;
long mult = 1000003L;
#if STATIC_TUPLE_HAS_HASH
if (self->hash != -1) {
// TODO: We could set specific flags if we know that, for example, all the
// items are strings. I haven't seen a real-world benefit to that.
y = PyObject_Hash(*p++);
if (y == -1) /* failure */
/* the cast might truncate len; that doesn't change hash stability */
mult += (long)(82520L + len + len);
#if STATIC_TUPLE_HAS_HASH
StaticTuple_richcompare_to_tuple(StaticTuple *v, PyObject *wt, int op)
PyObject *result = NULL;
vt = StaticTuple_as_tuple((StaticTuple *)v);
if (!PyTuple_Check(wt)) {
PyErr_BadInternalCall();
/* Now we have 2 tuples to compare, do it */
result = PyTuple_Type.tp_richcompare(vt, wt, op);
/** Compare two objects to determine if they are equivalent.
 * The basic flow is as follows:
 * 1) First make sure that both objects are StaticTuple instances. If they
 *    aren't, then cast self to a tuple and have the tuple do the comparison.
 * 2) Special case comparison to Py_None, because it happens to occur fairly
 *    often in the test suite.
 * 3) Special case when v and w are the same pointer, as we know the answer to
 *    all queries without walking individual items.
 * 4) For all operations, we then walk the items to find the first paired
 *    items that are not equal.
 * 5) If all items found are equal, we then check the length of self and
 *    other to determine equality.
 * 6) If an item differs, then we apply "op" to those last two items. (e.g.
 *    StaticTuple(A, B) > StaticTuple(A, C) iff B > C)
 */
StaticTuple_richcompare(PyObject *v, PyObject *w, int op)
StaticTuple *v_st, *w_st;
Py_ssize_t vlen, wlen, min_len, i;
PyObject *v_obj, *w_obj;
richcmpfunc string_richcompare;
if (!StaticTuple_CheckExact(v)) {
/* This has never triggered. According to python-dev, it might trigger
 * if '__op__' is defined but '__rop__' is not, as in
 * "None == StaticTuple()".
 */
fprintf(stderr, "self is not StaticTuple\n");
Py_INCREF(Py_NotImplemented);
return Py_NotImplemented;
v_st = (StaticTuple *)v;
if (StaticTuple_CheckExact(w)) {
/* The most common case */
w_st = (StaticTuple*)w;
} else if (PyTuple_Check(w)) {
/* One of v or w is a tuple, so we go the 'slow' route and cast up to
 * tuples to compare.
 */
/* TODO: This seems to be triggering more than I thought it would...
 * We probably want to optimize comparing self to other when
 * other is a tuple.
 */
return StaticTuple_richcompare_to_tuple(v_st, w, op);
} else if (w == Py_None) {
// None is always less than the object
#if PY_MAJOR_VERSION >= 3
case Py_GT: case Py_GE:
#if PY_MAJOR_VERSION >= 3
case Py_LT: case Py_LE:
default: // Should only happen on Python 3
return Py_NotImplemented;
/* We don't special case this comparison; we just let Python handle
 * it.
 */
Py_INCREF(Py_NotImplemented);
return Py_NotImplemented;
/* Now we know that we have 2 StaticTuple objects, so let's compare them.
 * This code is inspired by tuplerichcompare, except we know our
 * objects are limited in scope, so we can inline some comparisons.
 */
/* Identical pointers, we can shortcut this easily. */
case Py_EQ: case Py_LE: case Py_GE:
case Py_NE: case Py_LT: case Py_GT:
&& _StaticTuple_is_interned(v_st)
&& _StaticTuple_is_interned(w_st))
/* If both objects are interned, we know they are different if the
 * pointer is not the same, which would have been handled by the
 * previous if. No need to compare the entries.
 */
/* The only time we are likely to compare items of different lengths is in
 * something like the interned_keys set. However, the hash is good enough
 * that it is rare. Note that 'tuple_richcompare' also does not compare
 * lengths up front.
 */
min_len = (vlen < wlen) ? vlen : wlen;
string_richcompare = PyBytes_Type.tp_richcompare;
for (i = 0; i < min_len; i++) {
PyObject *result = NULL;
v_obj = StaticTuple_GET_ITEM(v_st, i);
w_obj = StaticTuple_GET_ITEM(w_st, i);
if (v_obj == w_obj) {
/* Shortcut case, these must be identical */
if (PyBytes_CheckExact(v_obj) && PyBytes_CheckExact(w_obj)) {
result = string_richcompare(v_obj, w_obj, Py_EQ);
} else if (StaticTuple_CheckExact(v_obj) &&
StaticTuple_CheckExact(w_obj))
/* Both are StaticTuple types, so recurse */
result = StaticTuple_richcompare(v_obj, w_obj, Py_EQ);
/* Fall back to generic richcompare */
result = PyObject_RichCompare(v_obj, w_obj, Py_EQ);
if (result == NULL) {
return NULL; /* There seems to be an error */
if (result == Py_False) {
// This entry is not identical; shortcut for Py_EQ
if (result != Py_True) {
/* We don't know *what* richcompare is returning, but it
 * isn't something we recognize.
 */
PyErr_BadInternalCall();
/* We walked off one of the lists, but everything compared equal so
 * far. Just compare the size.
 */
case Py_LT: cmp = vlen < wlen; break;
case Py_LE: cmp = vlen <= wlen; break;
case Py_EQ: cmp = vlen == wlen; break;
case Py_NE: cmp = vlen != wlen; break;
case Py_GT: cmp = vlen > wlen; break;
case Py_GE: cmp = vlen >= wlen; break;
default: return NULL; /* cannot happen */
/* The last item differs, shortcut the Py_NE case */
/* It is some other comparison, go ahead and do the real check. */
if (PyBytes_CheckExact(v_obj) && PyBytes_CheckExact(w_obj))
return string_richcompare(v_obj, w_obj, op);
} else if (StaticTuple_CheckExact(v_obj) &&
StaticTuple_CheckExact(w_obj))
/* Both are StaticTuple types, so recurse */
return StaticTuple_richcompare(v_obj, w_obj, op);
return PyObject_RichCompare(v_obj, w_obj, op);
StaticTuple_length(StaticTuple *self)

StaticTuple__is_interned(StaticTuple *self)
if (_StaticTuple_is_interned(self)) {

static char StaticTuple__is_interned_doc[] = "_is_interned() => True/False\n"
"Check to see if this tuple has been interned.\n";
StaticTuple_reduce(StaticTuple *self)
PyObject *result = NULL, *as_tuple = NULL;
result = PyTuple_New(2);
as_tuple = StaticTuple_as_tuple(self);
if (as_tuple == NULL) {
Py_INCREF(&StaticTuple_Type);
PyTuple_SET_ITEM(result, 0, (PyObject *)&StaticTuple_Type);
PyTuple_SET_ITEM(result, 1, as_tuple);

static char StaticTuple_reduce_doc[] = "__reduce__() => tuple\n";
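/* __reduce__ returns (StaticTuple_Type, items-as-tuple), so pickling round
 * trips through the constructor; at the Python level, for example:
 *   StaticTuple('foo', 'bar').__reduce__() == (StaticTuple, ('foo', 'bar'))
 */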
StaticTuple_add(PyObject *v, PyObject *w)
Py_ssize_t i, len_v, len_w;
/* StaticTuples and plain tuples may be added (concatenated) to
 * one another.
 */
if (StaticTuple_CheckExact(v)) {
len_v = ((StaticTuple*)v)->size;
} else if (PyTuple_Check(v)) {
len_v = PyTuple_GET_SIZE(v);
Py_INCREF(Py_NotImplemented);
return Py_NotImplemented;
if (StaticTuple_CheckExact(w)) {
len_w = ((StaticTuple*)w)->size;
} else if (PyTuple_Check(w)) {
len_w = PyTuple_GET_SIZE(w);
Py_INCREF(Py_NotImplemented);
return Py_NotImplemented;
result = StaticTuple_New(len_v + len_w);
for (i = 0; i < len_v; ++i) {
// This returns a new reference, which we then 'steal' with
// StaticTuple_SET_ITEM
item = PySequence_GetItem(v, i);
StaticTuple_SET_ITEM(result, i, item);
for (i = 0; i < len_w; ++i) {
item = PySequence_GetItem(w, i);
StaticTuple_SET_ITEM(result, i+len_v, item);
if (!StaticTuple_check_items(result)) {
return (PyObject *)result;
StaticTuple_item(StaticTuple *self, Py_ssize_t offset)
/* We cast to (int) to avoid worrying about whether Py_ssize_t is a
 * long long, etc.; offsets should never be >2**31 anyway.
 */
PyErr_Format(PyExc_IndexError, "StaticTuple_item does not support"
" negative indices: %d\n", (int)offset);
} else if (offset >= self->size) {
PyErr_Format(PyExc_IndexError, "StaticTuple index out of range"
" %d >= %d", (int)offset, (int)self->size);
obj = (PyObject *)self->items[offset];

#if PY_MAJOR_VERSION >= 3
StaticTuple_slice(StaticTuple *self, Py_ssize_t ilow, Py_ssize_t ihigh)
PyObject *as_tuple, *result;
as_tuple = StaticTuple_as_tuple(self);
if (as_tuple == NULL) {
result = PyTuple_Type.tp_as_sequence->sq_slice(as_tuple, ilow, ihigh);

StaticTuple_subscript(StaticTuple *self, PyObject *key)
PyObject *as_tuple, *result;
as_tuple = StaticTuple_as_tuple(self);
if (as_tuple == NULL) {
result = PyTuple_Type.tp_as_mapping->mp_subscript(as_tuple, key);
StaticTuple_traverse(StaticTuple *self, visitproc visit, void *arg)
for (i = self->size; --i >= 0;) {
Py_VISIT(self->items[i]);

StaticTuple_sizeof(StaticTuple *self)
res = _PyObject_SIZE(&StaticTuple_Type) + (int)self->size * sizeof(void*);
return PyInt_FromSsize_t(res);
static char StaticTuple_doc[] =
"C implementation of a StaticTuple structure."
"\n This is used as StaticTuple(item1, item2, item3)"
"\n This is similar to tuple, less flexible in what it"
"\n supports, but also has lighter memory consumption."
"\n Note that the constructor mimics the () form of tuples,"
"\n rather than the 'tuple()' constructor."
"\n e.g. StaticTuple(a, b) == (a, b) == tuple((a, b))";
static PyMethodDef StaticTuple_methods[] = {
{"as_tuple", (PyCFunction)StaticTuple_as_tuple, METH_NOARGS, StaticTuple_as_tuple_doc},
{"intern", (PyCFunction)StaticTuple_Intern, METH_NOARGS, StaticTuple_Intern_doc},
{"_is_interned", (PyCFunction)StaticTuple__is_interned, METH_NOARGS,
StaticTuple__is_interned_doc},
{"from_sequence", (PyCFunction)StaticTuple_from_sequence,
METH_STATIC | METH_VARARGS,
"Create a StaticTuple from a given sequence. This functions"
" the same as the tuple() constructor."},
{"__reduce__", (PyCFunction)StaticTuple_reduce, METH_NOARGS, StaticTuple_reduce_doc},
{"__sizeof__", (PyCFunction)StaticTuple_sizeof, METH_NOARGS},
{NULL, NULL} /* sentinel */
static PyNumberMethods StaticTuple_as_number = {
(binaryfunc) StaticTuple_add, /* nb_add */
0, /* nb_remainder */

static PySequenceMethods StaticTuple_as_sequence = {
(lenfunc)StaticTuple_length, /* sq_length */
(ssizeargfunc)StaticTuple_item, /* sq_item */
#if PY_MAJOR_VERSION >= 3
(ssizessizeargfunc)StaticTuple_slice, /* sq_slice */
0, /* sq_ass_slice */
#if PY_MAJOR_VERSION >= 3
0, /* sq_inplace_concat */
0, /* sq_inplace_repeat */

static PyMappingMethods StaticTuple_as_mapping = {
(lenfunc)StaticTuple_length, /* mp_length */
(binaryfunc)StaticTuple_subscript, /* mp_subscript */
0, /* mp_ass_subscript */
PyTypeObject StaticTuple_Type = {
PyVarObject_HEAD_INIT(NULL, 0)
"breezy._static_tuple_c.StaticTuple", /* tp_name */
sizeof(StaticTuple), /* tp_basicsize */
sizeof(PyObject *), /* tp_itemsize */
(destructor)StaticTuple_dealloc, /* tp_dealloc */
(reprfunc)StaticTuple_repr, /* tp_repr */
&StaticTuple_as_number, /* tp_as_number */
&StaticTuple_as_sequence, /* tp_as_sequence */
&StaticTuple_as_mapping, /* tp_as_mapping */
(hashfunc)StaticTuple_hash, /* tp_hash */
0, /* tp_as_buffer */
/* Py_TPFLAGS_CHECKTYPES tells the number operations that they shouldn't
 * try to 'coerce'; instead, operations like 'add' check their own arguments.
 */
Py_TPFLAGS_DEFAULT | Py_TPFLAGS_CHECKTYPES, /* tp_flags*/
StaticTuple_doc, /* tp_doc */
/* gc.get_referents checks the IS_GC flag before it calls tp_traverse,
 * and we don't include this object in the garbage collector because we
 * know it doesn't create cycles. However, 'meliae' will follow
 * tp_traverse, even if the object isn't GC, and we want that.
 */
(traverseproc)StaticTuple_traverse, /* tp_traverse */
StaticTuple_richcompare, /* tp_richcompare */
0, /* tp_weaklistoffset */
// Without implementing tp_iter, Python will fall back to PySequence*,
// which seems to work ok; we may need something faster/lighter in the
// future.
StaticTuple_methods, /* tp_methods */
0, /* tp_descr_get */
0, /* tp_descr_set */
0, /* tp_dictoffset */
StaticTuple_new_constructor, /* tp_new */
static PyMethodDef static_tuple_c_methods[] = {

setup_interned_tuples(PyObject *m)
_interned_tuples = (PyObject *)SimpleSet_New();
if (_interned_tuples != NULL) {
Py_INCREF(_interned_tuples);
PyModule_AddObject(m, "_interned_tuples", _interned_tuples);
setup_empty_tuple(PyObject *m)
if (_interned_tuples == NULL) {
fprintf(stderr, "You need to call setup_interned_tuples() before"
" setup_empty_tuple, because we intern it.\n");
// We need to create the empty tuple
stuple = (StaticTuple *)StaticTuple_New(0);
_empty_tuple = StaticTuple_Intern(stuple);
assert(_empty_tuple == stuple);
// At this point, refcnt is 2: 1 from New(), and 1 from the return from
// intern(). We will keep 1 for the _empty_tuple global, and use the other
// for the module reference.
PyModule_AddObject(m, "_empty_tuple", (PyObject *)_empty_tuple);

_StaticTuple_CheckExact(PyObject *obj)
return StaticTuple_CheckExact(obj);
setup_c_api(PyObject *m)
_export_function(m, "StaticTuple_New", StaticTuple_New,
"StaticTuple *(Py_ssize_t)");
_export_function(m, "StaticTuple_Intern", StaticTuple_Intern,
"StaticTuple *(StaticTuple *)");
_export_function(m, "StaticTuple_FromSequence", StaticTuple_FromSequence,
"StaticTuple *(PyObject *)");
_export_function(m, "_StaticTuple_CheckExact", _StaticTuple_CheckExact,
"int(PyObject *)");
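/* A minimal sketch of the consumer side, assuming the import helper that
 * _static_tuple_c.h declares for client extensions (the exact helper name
 * here is an assumption, mirroring import_breezy___simple_set_pyx below):
 *
 *   #include "_static_tuple_c.h"
 *   if (import_static_tuple_c() != 0) {
 *       return;  // C API could not be bound; an exception is already set
 *   }
 *   st = StaticTuple_New(2);  // now safe to call through the imported API
 */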
PYMOD_INIT_FUNC(_static_tuple_c)
StaticTuple_Type.tp_getattro = PyObject_GenericGetAttr;
if (PyType_Ready(&StaticTuple_Type) < 0) {
PYMOD_CREATE(m, "_static_tuple_c",
"C implementation of a StaticTuple structure",
static_tuple_c_methods);
Py_INCREF(&StaticTuple_Type);
PyModule_AddObject(m, "StaticTuple", (PyObject *)&StaticTuple_Type);
if (import_breezy___simple_set_pyx() == -1) {
setup_interned_tuples(m);
setup_empty_tuple(m);
return PYMOD_SUCCESS(m);

// vim: tabstop=4 sw=4 expandtab