/* Copyright (C) 2009 Canonical Ltd
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */
/* Must be defined before importing _static_tuple_c.h so that we get the right
 * linkage.
 */
#define STATIC_TUPLE_MODULE

#include "_static_tuple_c.h"
#include "_export_c_api.h"
#include "_simple_set_pyx_api.h"

#include "python-compat.h"
/* Map 'inline' to whatever spelling this compiler understands */
#if defined(__GNUC__)
# define inline __inline__
#elif defined(_MSC_VER)
# define inline __inline
#endif
/* The one and only StaticTuple with no values */
static StaticTuple *_empty_tuple = NULL;
static PyObject *_interned_tuples = NULL;
static inline int
_StaticTuple_is_interned(StaticTuple *self)
{
    return self->flags & STATIC_TUPLE_INTERNED_FLAG;
}
StaticTuple_as_tuple(StaticTuple *self)
    PyObject *tpl = NULL, *obj = NULL;

    tpl = PyTuple_New(len);
    for (i = 0; i < len; ++i) {
        obj = (PyObject *)self->items[i];
        PyTuple_SET_ITEM(tpl, i, obj);

static char StaticTuple_as_tuple_doc[] = "as_tuple() => tuple";
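
/* Illustrative only: a minimal sketch (kept out of the build with #if 0) of
 * how C code holding a StaticTuple might use as_tuple() above to hand the
 * values to an API that expects a real tuple. The helper name is
 * hypothetical. */
#if 0
static PyObject *
example_first_item(StaticTuple *st)
{
    PyObject *as_tuple, *item;

    as_tuple = StaticTuple_as_tuple(st);
    if (as_tuple == NULL)
        return NULL;
    /* PySequence_GetItem returns a new reference */
    item = PySequence_GetItem(as_tuple, 0);
    Py_DECREF(as_tuple);
    return item;
}
#endif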
StaticTuple_Intern(StaticTuple *self)
    PyObject *unique_key = NULL;

    if (_interned_tuples == NULL || _StaticTuple_is_interned(self)) {
    /* SimpleSet_Add returns whatever object is present at self
     * or the new object if it needs to add it.
     */
    unique_key = SimpleSet_Add(_interned_tuples, (PyObject *)self);
    // Suppress any error and just return the object
    if (unique_key != (PyObject *)self) {
        // There was already a key at that location
        return (StaticTuple *)unique_key;
    self->flags |= STATIC_TUPLE_INTERNED_FLAG;
    // The two references in the dict do not count, so that the StaticTuple
    // object does not become immortal just because it was interned.
    Py_REFCNT(self) -= 1;
static char StaticTuple_Intern_doc[] = "intern() => unique StaticTuple\n"
    "Return a 'canonical' StaticTuple object.\n"
    "Similar to intern() for strings, this makes sure there\n"
    "is only one StaticTuple object for a given value.\n"
    " key = StaticTuple('foo', 'bar').intern()\n";
StaticTuple_dealloc(StaticTuple *self)
    if (_StaticTuple_is_interned(self)) {
        /* revive dead object temporarily for Discard */
        if (SimpleSet_Discard(_interned_tuples, (PyObject*)self) != 1)
            Py_FatalError("deletion of interned StaticTuple failed");
        self->flags &= ~STATIC_TUPLE_INTERNED_FLAG;
    for (i = 0; i < len; ++i) {
        Py_XDECREF(self->items[i]);
    Py_TYPE(self)->tp_free((PyObject *)self);
/* Similar to PyTuple_New() */
StaticTuple_New(Py_ssize_t size)
    PyErr_BadInternalCall();
    if (size == 0 && _empty_tuple != NULL) {
        Py_INCREF(_empty_tuple);
    /* Note that we use PyObject_NewVar because we want to allocate a variable
     * width entry. However we *aren't* truly a PyVarObject because we don't
     * use a long for ob_size. Instead we use a plain 'size' that is an int,
     * and will be overloaded with flags in the future.
     * As such we do the alloc, and then have to clean up anything it does
     * incorrectly.
     */
    stuple = PyObject_NewVar(StaticTuple, &StaticTuple_Type, size);
    if (stuple == NULL) {
    stuple->_unused0 = 0;
    stuple->_unused1 = 0;
    memset(stuple->items, 0, sizeof(PyObject *) * size);
#if STATIC_TUPLE_HAS_HASH
StaticTuple_new(PyTypeObject *type, PyObject *args, PyObject *kwds)
    PyObject *obj = NULL;
    Py_ssize_t i, len = 0;

    if (type != &StaticTuple_Type) {
        PyErr_SetString(PyExc_TypeError, "we only support creating StaticTuple");
    if (!PyTuple_CheckExact(args)) {
        PyErr_SetString(PyExc_TypeError, "args must be a tuple");
    len = PyTuple_GET_SIZE(args);
    if (len < 0 || len > 255) {
        /* Too big or too small */
        PyErr_SetString(PyExc_ValueError, "StaticTuple.__init__(...)"
            " takes from 0 to 255 key bits");
    self = (StaticTuple *)StaticTuple_New(len);
    for (i = 0; i < len; ++i) {
        obj = PyTuple_GET_ITEM(args, i);
        if (!PyString_CheckExact(obj)) {
            if (!StaticTuple_CheckExact(obj)) {
                PyErr_SetString(PyExc_TypeError, "StaticTuple.__init__(...)"
                    " requires that all key bits are strings or StaticTuple.");
                /* TODO: What is the proper way to dealloc ? */
                type->tp_dealloc((PyObject *)self);
        self->items[i] = obj;
    return (PyObject *)self;
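
/* Illustrative only: a minimal sketch (kept out of the build with #if 0) of
 * building a two-element StaticTuple directly from C with StaticTuple_New(),
 * filling self->items the same way the constructor above does. The helper
 * name is hypothetical and error handling is abbreviated. */
#if 0
static StaticTuple *
example_make_pair(PyObject *first, PyObject *second)
{
    StaticTuple *st;

    st = StaticTuple_New(2);
    if (st == NULL)
        return NULL;
    /* StaticTuple_New leaves the item slots NULL; every slot must be filled
     * with a reference we own, mirroring StaticTuple_new above. */
    Py_INCREF(first);
    st->items[0] = first;
    Py_INCREF(second);
    st->items[1] = second;
    return st;
}
#endif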
StaticTuple_repr(StaticTuple *self)
    PyObject *as_tuple, *tuple_repr, *result;

    as_tuple = StaticTuple_as_tuple(self);
    if (as_tuple == NULL) {
    tuple_repr = PyObject_Repr(as_tuple);
    if (tuple_repr == NULL) {
    result = PyString_FromFormat("%s%s", Py_TYPE(self)->tp_name,
                                 PyString_AsString(tuple_repr));
StaticTuple_hash(StaticTuple *self)
    /* adapted from tuplehash(), is the specific hash value considered
     * 'stable'?
     */
    Py_ssize_t len = self->size;
    long mult = 1000003L;

#if STATIC_TUPLE_HAS_HASH
    if (self->hash != -1) {
    // TODO: We could set specific flags if we know that, for example, all the
    //       keys are strings. I haven't seen a real-world benefit to that yet,
        y = PyObject_Hash(*p++);
        if (y == -1) /* failure */
    /* the cast might truncate len; that doesn't change hash stability */
    mult += (long)(82520L + len + len);
#if STATIC_TUPLE_HAS_HASH
    if (self->hash != -1) {
        if (self->hash != x) {
            fprintf(stderr, "hash changed: %ld => %ld\n", self->hash, x);
StaticTuple_richcompare_to_tuple(StaticTuple *v, PyObject *wt, int op)
    PyObject *result = NULL;

    vt = StaticTuple_as_tuple((StaticTuple *)v);
    if (!PyTuple_Check(wt)) {
        PyErr_BadInternalCall();
    /* Now we have 2 tuples to compare, do it */
    result = PyTuple_Type.tp_richcompare(vt, wt, op);
StaticTuple_richcompare(PyObject *v, PyObject *w, int op)
    StaticTuple *vk, *wk;
    Py_ssize_t vlen, wlen, min_len, i;
    PyObject *v_obj, *w_obj;
    richcmpfunc string_richcompare;

    if (!StaticTuple_CheckExact(v)) {
        /* This has never triggered, according to python-dev it seems this
         * might trigger if '__op__' is defined but '__rop__' is not, sort of
         * case. Such as "None == StaticTuple()"
         */
        fprintf(stderr, "self is not StaticTuple\n");
        Py_INCREF(Py_NotImplemented);
        return Py_NotImplemented;
    vk = (StaticTuple *)v;
    if (StaticTuple_CheckExact(w)) {
        /* The most common case */
        wk = (StaticTuple*)w;
    } else if (PyTuple_Check(w)) {
        /* One of v or w is a tuple, so we go the 'slow' route and cast up to
         * tuples to compare.
         */
        /* TODO: This seems to be triggering more than I thought it would...
         *       We probably want to optimize comparing self to other when
         *       other is a tuple.
         */
        return StaticTuple_richcompare_to_tuple(vk, w, op);
    } else if (w == Py_None) {
        // None is always less than the object
        case Py_NE:case Py_GT:case Py_GE:
        case Py_EQ:case Py_LT:case Py_LE:
        /* We don't special case this comparison, we just let python handle
         * it.
         */
        Py_INCREF(Py_NotImplemented);
        return Py_NotImplemented;
    /* Now we know that we have 2 StaticTuple objects, so let's compare them.
     * This code is somewhat borrowed from tuplerichcompare, except we know our
     * objects are limited in scope, so we can inline some comparisons.
     */
    /* Identical pointers, we can shortcut this easily. */
        case Py_EQ:case Py_LE:case Py_GE:
        case Py_NE:case Py_LT:case Py_GT:
    /* TODO: if STATIC_TUPLE_INTERNED_FLAG is set on both objects and they are
     *       not the same pointer, then we know they aren't the same object
     *       without having to do sub-by-sub comparison.
     */
    /* It will be rare that we compare tuples of different lengths, so we don't
     * start by optimizing the length comparison, same as the tuple code
     * TODO: Interning may change this, because we'll be comparing lots of
     *       different StaticTuple objects in the intern dict
     */
    min_len = (vlen < wlen) ? vlen : wlen;
    string_richcompare = PyString_Type.tp_richcompare;
    for (i = 0; i < min_len; i++) {
        PyObject *result = NULL;
        v_obj = StaticTuple_GET_ITEM(vk, i);
        w_obj = StaticTuple_GET_ITEM(wk, i);
        if (PyString_CheckExact(v_obj) && PyString_CheckExact(w_obj)) {
            result = string_richcompare(v_obj, w_obj, Py_EQ);
        } else if (StaticTuple_CheckExact(v_obj) &&
                   StaticTuple_CheckExact(w_obj))
            /* Both are StaticTuple types, so recurse */
            result = StaticTuple_richcompare(v_obj, w_obj, Py_EQ);
        /* Not the same type, obviously they won't compare equal */
        if (result == NULL) {
            return NULL; /* There seems to be an error */
        if (result == Py_NotImplemented) {
            PyErr_BadInternalCall();
        if (result == Py_False) {
            /* This entry is not identical */
        if (result != Py_True) {
            /* We don't know *what* richcompare is returning, but it
             * isn't something we recognize
             */
            PyErr_BadInternalCall();
    if (i >= vlen || i >= wlen) {
        /* We walked off one of the lists, but everything compared equal so
         * far. Just compare the size.
         */
        case Py_LT: cmp = vlen < wlen; break;
        case Py_LE: cmp = vlen <= wlen; break;
        case Py_EQ: cmp = vlen == wlen; break;
        case Py_NE: cmp = vlen != wlen; break;
        case Py_GT: cmp = vlen > wlen; break;
        case Py_GE: cmp = vlen >= wlen; break;
        default: return NULL; /* cannot happen */
    /* The last item differs, shortcut the Py_NE case */
    /* It is some other comparison, go ahead and do the real check. */
    if (PyString_CheckExact(v_obj) && PyString_CheckExact(w_obj))
        return string_richcompare(v_obj, w_obj, op);
    } else if (StaticTuple_CheckExact(v_obj) &&
               StaticTuple_CheckExact(w_obj))
        /* Both are StaticTuple types, so recurse */
        return StaticTuple_richcompare(v_obj, w_obj, op);
    Py_INCREF(Py_NotImplemented);
    return Py_NotImplemented;
StaticTuple_length(StaticTuple *self)

StaticTuple__is_interned(StaticTuple *self)
    if (_StaticTuple_is_interned(self)) {

static char StaticTuple__is_interned_doc[] = "_is_interned() => True/False\n"
    "Check to see if this key has been interned.\n";
StaticTuple_item(StaticTuple *self, Py_ssize_t offset)
    if (offset < 0 || offset >= self->size) {
        PyErr_SetString(PyExc_IndexError, "StaticTuple index out of range");
    obj = (PyObject *)self->items[offset];
StaticTuple_slice(StaticTuple *self, Py_ssize_t ilow, Py_ssize_t ihigh)
    PyObject *as_tuple, *result;

    as_tuple = StaticTuple_as_tuple(self);
    if (as_tuple == NULL) {
    result = PyTuple_Type.tp_as_sequence->sq_slice(as_tuple, ilow, ihigh);
StaticTuple_traverse(StaticTuple *self, visitproc visit, void *arg)
    for (i = self->size; --i >= 0;) {
        Py_VISIT(self->items[i]);
static char StaticTuple_doc[] =
    "C implementation of a StaticTuple structure."
    "\n This is used as StaticTuple(key_bit_1, key_bit_2, key_bit_3, ...)"
    "\n This is similar to tuple, just less flexible in what it"
    "\n supports, but also lighter in memory consumption.";
static PyMethodDef StaticTuple_methods[] = {
    {"as_tuple", (PyCFunction)StaticTuple_as_tuple, METH_NOARGS, StaticTuple_as_tuple_doc},
    {"intern", (PyCFunction)StaticTuple_Intern, METH_NOARGS, StaticTuple_Intern_doc},
    {"_is_interned", (PyCFunction)StaticTuple__is_interned, METH_NOARGS,
     StaticTuple__is_interned_doc},
    {NULL, NULL} /* sentinel */
};

static PySequenceMethods StaticTuple_as_sequence = {
    (lenfunc)StaticTuple_length,          /* sq_length */
    (ssizeargfunc)StaticTuple_item,       /* sq_item */
    (ssizessizeargfunc)StaticTuple_slice, /* sq_slice */
    0,                                    /* sq_ass_slice */
PyTypeObject StaticTuple_Type = {
    PyObject_HEAD_INIT(NULL)
    "StaticTuple",                      /* tp_name */
    sizeof(StaticTuple),                /* tp_basicsize */
    sizeof(PyObject *),                 /* tp_itemsize */
    (destructor)StaticTuple_dealloc,    /* tp_dealloc */
    (reprfunc)StaticTuple_repr,         /* tp_repr */
    0,                                  /* tp_as_number */
    &StaticTuple_as_sequence,           /* tp_as_sequence */
    0,                                  /* tp_as_mapping */
    (hashfunc)StaticTuple_hash,         /* tp_hash */
    PyObject_GenericGetAttr,            /* tp_getattro */
    0,                                  /* tp_as_buffer */
    Py_TPFLAGS_DEFAULT,                 /* tp_flags */
    StaticTuple_doc,                    /* tp_doc */
    /* gc.get_referents checks the IS_GC flag before it calls tp_traverse
     * And we don't include this object in the garbage collector because we
     * know it doesn't create cycles. However, 'meliae' will follow
     * tp_traverse, even if the object isn't GC, and we want that.
     */
    (traverseproc)StaticTuple_traverse, /* tp_traverse */
    StaticTuple_richcompare,            /* tp_richcompare */
    0,                                  /* tp_weaklistoffset */
    // without implementing tp_iter, Python will fall back to PySequence*
    // which seems to work ok, we may need something faster/lighter in the
    // future.
    StaticTuple_methods,                /* tp_methods */
    0,                                  /* tp_descr_get */
    0,                                  /* tp_descr_set */
    0,                                  /* tp_dictoffset */
    StaticTuple_new,                    /* tp_new */
static PyMethodDef static_tuple_c_methods[] = {
setup_interned_tuples(PyObject *m)
    _interned_tuples = (PyObject *)SimpleSet_New();
    if (_interned_tuples != NULL) {
        Py_INCREF(_interned_tuples);
        PyModule_AddObject(m, "_interned_tuples", _interned_tuples);
setup_empty_tuple(PyObject *m)
    if (_interned_tuples == NULL) {
        fprintf(stderr, "You need to call setup_interned_tuples() before"
                        " setup_empty_tuple, because we intern it.\n");
    // We need to create the empty tuple
    stuple = (StaticTuple *)StaticTuple_New(0);
    _empty_tuple = StaticTuple_Intern(stuple);
    assert(_empty_tuple == stuple);
    // At this point, refcnt is 2: 1 from New(), and 1 from the return from
    // intern(). We will keep 1 for the _empty_tuple global, and use the other
    // for the module reference.
    PyModule_AddObject(m, "_empty_tuple", (PyObject *)_empty_tuple);
static int
_StaticTuple_CheckExact(PyObject *obj)
{
    return StaticTuple_CheckExact(obj);
}
setup_c_api(PyObject *m)
    _export_function(m, "StaticTuple_New", StaticTuple_New,
        "StaticTuple *(Py_ssize_t)");
    _export_function(m, "StaticTuple_Intern", StaticTuple_Intern,
        "StaticTuple *(StaticTuple *)");
    _export_function(m, "_StaticTuple_CheckExact", _StaticTuple_CheckExact,
        "int(PyObject *)");
init_static_tuple_c(void)
    if (PyType_Ready(&StaticTuple_Type) < 0)

    m = Py_InitModule3("_static_tuple_c", static_tuple_c_methods,
                       "C implementation of a StaticTuple structure");

    Py_INCREF(&StaticTuple_Type);
    PyModule_AddObject(m, "StaticTuple", (PyObject *)&StaticTuple_Type);
    if (import_bzrlib___simple_set_pyx() == -1) {
        // We failed to set up, stop early
    setup_interned_tuples(m);
    setup_empty_tuple(m);
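
/* Illustrative only: a minimal sketch (kept out of the build with #if 0) of
 * how another extension module might pull in the C API exported by
 * setup_c_api() above. The import helper name import_static_tuple_c() is an
 * assumption based on the conventions declared in _static_tuple_c.h. */
#if 0
void
example_consumer_init(void)
{
    if (import_static_tuple_c() == -1) {
        /* the import already set a Python exception; just bail out */
        return;
    }
    /* StaticTuple_New, StaticTuple_Intern, etc. can now be called */
}
#endif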