/* Copyright (C) 2009 Canonical Ltd
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

/* Must be defined before importing _static_tuple_c.h so that we get the right
 * linkage.
 */
#define STATIC_TUPLE_MODULE

#include "_static_tuple_c.h"
#include "_export_c_api.h"
#include "_simple_set_pyx_api.h"

#include "python-compat.h"

#if defined(__GNUC__)
#   define inline __inline__
#elif defined(_MSC_VER)
#   define inline __inline
#else
#   define inline
#endif

/* The one and only StaticTuple with no values */
static StaticTuple *_empty_tuple = NULL;
static PyObject *_interned_tuples = NULL;

static inline int
_StaticTuple_is_interned(StaticTuple *self)
{
    return self->flags & STATIC_TUPLE_INTERNED_FLAG;
}

static PyObject *
StaticTuple_as_tuple(StaticTuple *self)
{
    PyObject *tpl = NULL, *obj = NULL;
    int i, len;

    len = self->size;
    tpl = PyTuple_New(len);
    if (!tpl) {
        /* Malloc failure */
        return NULL;
    }
    for (i = 0; i < len; ++i) {
        obj = (PyObject *)self->items[i];
        /* PyTuple_SET_ITEM steals a reference, so take one first */
        Py_INCREF(obj);
        PyTuple_SET_ITEM(tpl, i, obj);
    }
    return tpl;
}

static char StaticTuple_as_tuple_doc[] = "as_tuple() => tuple";

static StaticTuple *
StaticTuple_Intern(StaticTuple *self)
{
    PyObject *canonical_tuple = NULL;

    if (_interned_tuples == NULL || _StaticTuple_is_interned(self)) {
        Py_INCREF(self);
        return self;
    }
    /* SimpleSet_Add returns whatever object is present at self
     * or the new object if it needs to add it.
     */
    canonical_tuple = SimpleSet_Add(_interned_tuples, (PyObject *)self);
    if (!canonical_tuple) {
        // Some sort of exception, propagate it.
        return NULL;
    }
    if (canonical_tuple != (PyObject *)self) {
        // There was already a tuple with that value
        return (StaticTuple *)canonical_tuple;
    }
    self->flags |= STATIC_TUPLE_INTERNED_FLAG;
    // The reference held by the interned set does not count, so that the
    // StaticTuple object does not become immortal just because it was
    // interned.
    Py_REFCNT(self) -= 1;
    return self;
}

static char StaticTuple_Intern_doc[] = "intern() => unique StaticTuple\n"
    "Return a 'canonical' StaticTuple object.\n"
    "Similar to intern() for strings, this makes sure there\n"
    "is only one StaticTuple object for a given value.\n"
    "Common usage is:\n"
    "  key = StaticTuple('foo', 'bar').intern()\n";

static void
StaticTuple_dealloc(StaticTuple *self)
{
    int i, len;

    if (_StaticTuple_is_interned(self)) {
        /* revive dead object temporarily for Discard */
        Py_REFCNT(self) = 2;
        if (SimpleSet_Discard(_interned_tuples, (PyObject*)self) != 1)
            Py_FatalError("deletion of interned StaticTuple failed");
        self->flags &= ~STATIC_TUPLE_INTERNED_FLAG;
    }
    len = self->size;
    for (i = 0; i < len; ++i) {
        Py_XDECREF(self->items[i]);
        self->items[i] = NULL;
    }
    Py_TYPE(self)->tp_free((PyObject *)self);
}

/* Similar to PyTuple_New() */
static StaticTuple *
StaticTuple_New(Py_ssize_t size)
{
    StaticTuple *stuple;

    if (size < 0) {
        PyErr_BadInternalCall();
        return NULL;
    }
    if (size == 0 && _empty_tuple != NULL) {
        Py_INCREF(_empty_tuple);
        return _empty_tuple;
    }
    /* Note that we use PyObject_NewVar because we want to allocate a variable
     * width entry. However we *aren't* truly a PyVarObject because we don't
     * use a long for ob_size. Instead we use a plain 'size' that is an int,
     * and will be overloaded with flags in the future.
     * As such we do the alloc, and then have to clean up anything it does
     * incorrectly.
     */
    stuple = PyObject_NewVar(StaticTuple, &StaticTuple_Type, size);
    if (stuple == NULL) {
        return NULL;
    }
    stuple->size = size;
    stuple->flags = 0;
    stuple->_unused0 = 0;
    stuple->_unused1 = 0;
    if (size > 0) {
        memset(stuple->items, 0, sizeof(PyObject *) * size);
    }
#if STATIC_TUPLE_HAS_HASH
    stuple->hash = -1;
#endif
    return stuple;
}

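/* Illustrative sketch (hypothetical helper, not part of the module): the
 * PyTuple_New()-style fill pattern. StaticTuple_New() hands back zeroed
 * item slots, and the caller must store one owned reference per slot
 * before the tuple is used (hashed, compared, or returned to Python).
 */
#if 0
static StaticTuple *
example_build_pair(PyObject *first, PyObject *second)
{
    StaticTuple *st = StaticTuple_New(2);
    if (st == NULL)
        return NULL;
    Py_INCREF(first);       /* each slot owns a reference */
    st->items[0] = first;
    Py_INCREF(second);
    st->items[1] = second;
    return st;
}
#endif
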
static PyObject *
StaticTuple_new(PyTypeObject *type, PyObject *args, PyObject *kwds)
{
    StaticTuple *self;
    PyObject *obj = NULL;
    Py_ssize_t i, len = 0;

    if (type != &StaticTuple_Type) {
        PyErr_SetString(PyExc_TypeError, "we only support creating StaticTuple");
        return NULL;
    }
    if (!PyTuple_CheckExact(args)) {
        PyErr_SetString(PyExc_TypeError, "args must be a tuple");
        return NULL;
    }
    len = PyTuple_GET_SIZE(args);
    if (len < 0 || len > 255) {
        /* Too big or too small */
        PyErr_SetString(PyExc_ValueError, "StaticTuple.__init__(...)"
            " takes from 0 to 255 items");
        return NULL;
    }
    self = (StaticTuple *)StaticTuple_New(len);
    if (self == NULL) {
        return NULL;
    }
    for (i = 0; i < len; ++i) {
        obj = PyTuple_GET_ITEM(args, i);
        if (!PyString_CheckExact(obj)) {
            if (!StaticTuple_CheckExact(obj)) {
                PyErr_SetString(PyExc_TypeError, "StaticTuple.__init__(...)"
                    " requires that all items are strings or StaticTuple.");
                type->tp_dealloc((PyObject *)self);
                return NULL;
            }
        }
        Py_INCREF(obj);
        self->items[i] = obj;
    }
    return (PyObject *)self;
}

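/* Python-level behaviour implied by the checks above (illustrative):
 *     StaticTuple('foo', StaticTuple('bar'))   # ok: str and StaticTuple
 *     StaticTuple(1)                           # TypeError: items must be
 *                                              #   strings or StaticTuple
 *     StaticTuple(*(['x'] * 256))              # ValueError: 0 to 255 items
 */
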
static PyObject *
StaticTuple_repr(StaticTuple *self)
{
    PyObject *as_tuple, *tuple_repr, *result;

    as_tuple = StaticTuple_as_tuple(self);
    if (as_tuple == NULL) {
        return NULL;
    }
    tuple_repr = PyObject_Repr(as_tuple);
    Py_DECREF(as_tuple);
    if (tuple_repr == NULL) {
        return NULL;
    }
    result = PyString_FromFormat("%s%s", Py_TYPE(self)->tp_name,
                                 PyString_AsString(tuple_repr));
    Py_DECREF(tuple_repr);
    return result;
}

static long
StaticTuple_hash(StaticTuple *self)
{
    /* adapted from tuplehash(); is the specific hash value considered
     * 'stable'?
     */
    register long x, y;
    Py_ssize_t len = self->size;
    PyObject **p;
    long mult = 1000003L;

#if STATIC_TUPLE_HAS_HASH
    if (self->hash != -1) {
        return self->hash;
    }
#endif
    x = 0x345678L;
    p = self->items;
    // TODO: We could set specific flags if we know that, for example, all the
    //       items are strings. I haven't seen a real-world benefit to that
    //       yet.
    while (--len >= 0) {
        y = PyObject_Hash(*p++);
        if (y == -1) /* failure */
            return -1;
        x = (x ^ y) * mult;
        /* the cast might truncate len; that doesn't change hash stability */
        mult += (long)(82520L + len + len);
    }
    x += 97531L;
    if (x == -1)
        x = -2;
#if STATIC_TUPLE_HAS_HASH
    self->hash = x;
#endif
    return x;
}

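/* Worked sketch of the recurrence above, assuming the tuplehash() constants
 * shown in the loop (values illustrative only). For two items with hashes
 * y0 and y1:
 *     x = 0x345678
 *     x = (x ^ y0) * 1000003             ; mult += 82520 + 1 + 1
 *     x = (x ^ y1) * (1000003 + 82522)   ; mult += 82520 + 0 + 0
 *     x += 97531
 * Matching tuplehash() means StaticTuple('a', 'b') and ('a', 'b') hash the
 * same, so either form can look up the other in a dict or set.
 */
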
static PyObject *
StaticTuple_richcompare_to_tuple(StaticTuple *v, PyObject *wt, int op)
{
    PyObject *vt;
    PyObject *result = NULL;

    vt = StaticTuple_as_tuple((StaticTuple *)v);
    if (vt == NULL) {
        goto done;
    }
    if (!PyTuple_Check(wt)) {
        PyErr_BadInternalCall();
        goto done;
    }
    /* Now we have 2 tuples to compare, do it */
    result = PyTuple_Type.tp_richcompare(vt, wt, op);
done:
    Py_XDECREF(vt);
    return result;
}

/** Compare two objects to determine if they are equivalent.
 * The basic flow is as follows:
 *  1) First make sure that both objects are StaticTuple instances. If they
 *     aren't, then cast self to a tuple and have the tuple do the comparison.
 *  2) Special case comparison to Py_None, because it happens to occur fairly
 *     often in the test suite.
 *  3) Special case when v and w are the same pointer, as we know the answer
 *     to all queries without walking individual items.
 *  4) For all operations, we then walk the items to find the first paired
 *     items that are not equal.
 *  5) If all items found are equal, we then check the length of self and
 *     other to determine equality.
 *  6) If an item differs, then we apply "op" to those last two items. (eg.
 *     StaticTuple(A, B) > StaticTuple(A, C) iff B > C)
 */

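/* Illustrative outcomes of the flow above (Python level):
 *     StaticTuple('a', 'b') == StaticTuple('a', 'b')  # True, steps 4-5
 *     StaticTuple('a') < StaticTuple('a', 'b')        # True, step 5:
 *                                                     #   equal prefix, shorter
 *     StaticTuple('a', 'b') < StaticTuple('a', 'c')   # True, step 6: 'b' < 'c'
 */
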
static PyObject *
StaticTuple_richcompare(PyObject *v, PyObject *w, int op)
{
    StaticTuple *v_st, *w_st;
    Py_ssize_t vlen, wlen, min_len, i;
    PyObject *v_obj, *w_obj;
    richcmpfunc string_richcompare;

    if (!StaticTuple_CheckExact(v)) {
        /* This has never triggered. According to python-dev, this sort of
         * case might trigger if '__op__' is defined but '__rop__' is not,
         * such as "None == StaticTuple()".
         */
        fprintf(stderr, "self is not StaticTuple\n");
        Py_INCREF(Py_NotImplemented);
        return Py_NotImplemented;
    }
    v_st = (StaticTuple *)v;
    if (StaticTuple_CheckExact(w)) {
        /* The most common case */
        w_st = (StaticTuple*)w;
    } else if (PyTuple_Check(w)) {
        /* One of v or w is a tuple, so we go the 'slow' route and cast up to
         * tuples to compare.
         */
        /* TODO: This seems to be triggering more than I thought it would...
         *       We probably want to optimize comparing self to other when
         *       other is a tuple.
         */
        return StaticTuple_richcompare_to_tuple(v_st, w, op);
    } else if (w == Py_None) {
        // None is always less than the object
        switch (op) {
        case Py_NE:case Py_GT:case Py_GE:
            Py_INCREF(Py_True);
            return Py_True;
        case Py_EQ:case Py_LT:case Py_LE:
            Py_INCREF(Py_False);
            return Py_False;
        default: // Should never happen
            return Py_NotImplemented;
        }
    } else {
        /* We don't special case this comparison, we just let python handle
         * it.
         */
        Py_INCREF(Py_NotImplemented);
        return Py_NotImplemented;
    }
    /* Now we know that we have 2 StaticTuple objects, so let's compare them.
     * This code is inspired from tuplerichcompare, except we know our
     * objects are limited in scope, so we can inline some comparisons.
     */
    if (v == w) {
        /* Identical pointers, we can shortcut this easily. */
        switch (op) {
        case Py_EQ:case Py_LE:case Py_GE:
            Py_INCREF(Py_True);
            return Py_True;
        case Py_NE:case Py_LT:case Py_GT:
            Py_INCREF(Py_False);
            return Py_False;
        }
    }
    if (op == Py_EQ
        && _StaticTuple_is_interned(v_st)
        && _StaticTuple_is_interned(w_st))
    {
        /* If both objects are interned, we know they are different if the
         * pointer is not the same, which would have been handled by the
         * previous if. No need to compare the entries.
         */
        Py_INCREF(Py_False);
        return Py_False;
    }
    /* The only time we are likely to compare items of different lengths is in
     * something like the interned_keys set. However, the hash is good enough
     * that it is rare. Note that 'tuple_richcompare' also does not compare
     * lengths here.
     */
    vlen = v_st->size;
    wlen = w_st->size;
    min_len = (vlen < wlen) ? vlen : wlen;
    string_richcompare = PyString_Type.tp_richcompare;
    for (i = 0; i < min_len; i++) {
        PyObject *result = NULL;
        v_obj = StaticTuple_GET_ITEM(v_st, i);
        w_obj = StaticTuple_GET_ITEM(w_st, i);
        if (v_obj == w_obj) {
            /* Shortcut case, these must be identical */
            continue;
        }
        if (PyString_CheckExact(v_obj) && PyString_CheckExact(w_obj)) {
            result = string_richcompare(v_obj, w_obj, Py_EQ);
        } else if (StaticTuple_CheckExact(v_obj) &&
                   StaticTuple_CheckExact(w_obj))
        {
            /* Both are StaticTuple types, so recurse */
            result = StaticTuple_richcompare(v_obj, w_obj, Py_EQ);
        } else {
            /* Not the same type, obviously they won't compare equal */
            break;
        }
        if (result == NULL) {
            return NULL; /* There seems to be an error */
        }
        if (result == Py_NotImplemented) {
            PyErr_BadInternalCall();
            Py_DECREF(result);
            return NULL;
        }
        if (result == Py_False) {
            /* This entry is not identical; shortcut the Py_EQ case */
            if (op == Py_EQ) {
                return result;
            }
            Py_DECREF(result);
            break;
        }
        if (result != Py_True) {
            /* We don't know *what* richcompare is returning, but it
             * isn't something we recognize
             */
            PyErr_BadInternalCall();
            Py_DECREF(result);
            return NULL;
        }
        Py_DECREF(result);
    }
    if (i >= vlen || i >= wlen) {
        /* We walked off one of the lists, but everything compared equal so
         * far. Just compare the size.
         */
        int cmp;
        PyObject *res;
        switch (op) {
        case Py_LT: cmp = vlen < wlen; break;
        case Py_LE: cmp = vlen <= wlen; break;
        case Py_EQ: cmp = vlen == wlen; break;
        case Py_NE: cmp = vlen != wlen; break;
        case Py_GT: cmp = vlen > wlen; break;
        case Py_GE: cmp = vlen >= wlen; break;
        default: return NULL; /* cannot happen */
        }
        if (cmp)
            res = Py_True;
        else
            res = Py_False;
        Py_INCREF(res);
        return res;
    }
    /* The last item differs, shortcut the Py_NE case */
    if (op == Py_NE) {
        Py_INCREF(Py_True);
        return Py_True;
    }
    /* It is some other comparison, go ahead and do the real check. */
    if (PyString_CheckExact(v_obj) && PyString_CheckExact(w_obj))
    {
        return string_richcompare(v_obj, w_obj, op);
    } else if (StaticTuple_CheckExact(v_obj) &&
               StaticTuple_CheckExact(w_obj))
    {
        /* Both are StaticTuple types, so recurse */
        return StaticTuple_richcompare(v_obj, w_obj, op);
    } else {
        Py_INCREF(Py_NotImplemented);
        return Py_NotImplemented;
    }
}

static Py_ssize_t
StaticTuple_length(StaticTuple *self)
{
    return self->size;
}

static PyObject *
StaticTuple__is_interned(StaticTuple *self)
{
    if (_StaticTuple_is_interned(self)) {
        Py_INCREF(Py_True);
        return Py_True;
    }
    Py_INCREF(Py_False);
    return Py_False;
}

static char StaticTuple__is_interned_doc[] = "_is_interned() => True/False\n"
    "Check to see if this tuple has been interned.\n";

static PyObject *
StaticTuple_item(StaticTuple *self, Py_ssize_t offset)
{
    PyObject *obj;

    /* We cast to (int) to avoid worrying about whether Py_ssize_t is a
     * long long, etc. offsets should never be >2**31 anyway.
     */
    if (offset < 0) {
        PyErr_Format(PyExc_IndexError, "StaticTuple_item does not support"
            " negative indices: %d\n", (int)offset);
        return NULL;
    } else if (offset >= self->size) {
        PyErr_Format(PyExc_IndexError, "StaticTuple index out of range"
            " %d >= %d", (int)offset, (int)self->size);
        return NULL;
    }
    obj = (PyObject *)self->items[offset];
    Py_INCREF(obj);
    return obj;
}

static PyObject *
StaticTuple_slice(StaticTuple *self, Py_ssize_t ilow, Py_ssize_t ihigh)
{
    PyObject *as_tuple, *result;

    as_tuple = StaticTuple_as_tuple(self);
    if (as_tuple == NULL) {
        return NULL;
    }
    result = PyTuple_Type.tp_as_sequence->sq_slice(as_tuple, ilow, ihigh);
    Py_DECREF(as_tuple);
    return result;
}

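/* Note the consequence at the Python level (illustrative): slicing
 * round-trips through as_tuple(), so it returns a plain tuple rather than
 * a StaticTuple:
 *     StaticTuple('a', 'b', 'c')[0:2]   # -> ('a', 'b')
 */
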
static int
StaticTuple_traverse(StaticTuple *self, visitproc visit, void *arg)
{
    Py_ssize_t i;

    for (i = self->size; --i >= 0;) {
        Py_VISIT(self->items[i]);
    }
    return 0;
}

static char StaticTuple_doc[] =
    "C implementation of a StaticTuple structure."
    "\n This is used as StaticTuple(item1, item2, item3)"
    "\n This is similar to tuple, less flexible in what it"
    "\n supports, but with lighter memory consumption."
    "\n Note that the constructor mimics the () form of tuples,"
    "\n rather than the 'tuple()' constructor."
    "\n  eg. StaticTuple(a, b) == (a, b) == tuple((a, b))";

static PyMethodDef StaticTuple_methods[] = {
    {"as_tuple", (PyCFunction)StaticTuple_as_tuple, METH_NOARGS, StaticTuple_as_tuple_doc},
    {"intern", (PyCFunction)StaticTuple_Intern, METH_NOARGS, StaticTuple_Intern_doc},
    {"_is_interned", (PyCFunction)StaticTuple__is_interned, METH_NOARGS,
     StaticTuple__is_interned_doc},
    {NULL, NULL} /* sentinel */
};

static PySequenceMethods StaticTuple_as_sequence = {
    (lenfunc)StaticTuple_length,            /* sq_length */
    0,                                      /* sq_concat */
    0,                                      /* sq_repeat */
    (ssizeargfunc)StaticTuple_item,         /* sq_item */
    (ssizessizeargfunc)StaticTuple_slice,   /* sq_slice */
    0,                                      /* sq_ass_item */
    0,                                      /* sq_ass_slice */
    0,                                      /* sq_contains */
};

/* TODO: Implement StaticTuple_as_mapping.
 *       The only thing we really want to support from there is mp_subscript,
 *       so that we could support extended slicing (foo[::2]). Not worth it
 *       yet, though.
 */

PyTypeObject StaticTuple_Type = {
    PyObject_HEAD_INIT(NULL)
    0,                                       /* ob_size */
    "StaticTuple",                           /* tp_name */
    sizeof(StaticTuple),                     /* tp_basicsize */
    sizeof(PyObject *),                      /* tp_itemsize */
    (destructor)StaticTuple_dealloc,         /* tp_dealloc */
    0,                                       /* tp_print */
    0,                                       /* tp_getattr */
    0,                                       /* tp_setattr */
    0,                                       /* tp_compare */
    (reprfunc)StaticTuple_repr,              /* tp_repr */
    0,                                       /* tp_as_number */
    &StaticTuple_as_sequence,                /* tp_as_sequence */
    0,                                       /* tp_as_mapping */
    (hashfunc)StaticTuple_hash,              /* tp_hash */
    0,                                       /* tp_call */
    0,                                       /* tp_str */
    PyObject_GenericGetAttr,                 /* tp_getattro */
    0,                                       /* tp_setattro */
    0,                                       /* tp_as_buffer */
    Py_TPFLAGS_DEFAULT,                      /* tp_flags*/
    StaticTuple_doc,                         /* tp_doc */
    /* gc.get_referents checks the IS_GC flag before it calls tp_traverse,
     * and we don't include this object in the garbage collector because we
     * know it doesn't create cycles. However, 'meliae' will follow
     * tp_traverse, even if the object isn't GC, and we want that.
     */
    (traverseproc)StaticTuple_traverse,      /* tp_traverse */
    0,                                       /* tp_clear */
    StaticTuple_richcompare,                 /* tp_richcompare */
    0,                                       /* tp_weaklistoffset */
    // without implementing tp_iter, Python will fall back to PySequence*,
    // which seems to work ok; we may need something faster/lighter in the
    // future.
    0,                                       /* tp_iter */
    0,                                       /* tp_iternext */
    StaticTuple_methods,                     /* tp_methods */
    0,                                       /* tp_members */
    0,                                       /* tp_getset */
    0,                                       /* tp_base */
    0,                                       /* tp_dict */
    0,                                       /* tp_descr_get */
    0,                                       /* tp_descr_set */
    0,                                       /* tp_dictoffset */
    0,                                       /* tp_init */
    0,                                       /* tp_alloc */
    StaticTuple_new,                         /* tp_new */
};

static PyMethodDef static_tuple_c_methods[] = {
    {NULL, NULL} /* sentinel */
};

static void
setup_interned_tuples(PyObject *m)
{
    _interned_tuples = (PyObject *)SimpleSet_New();
    if (_interned_tuples != NULL) {
        // PyModule_AddObject steals a reference, so take one to keep the
        // global alive
        Py_INCREF(_interned_tuples);
        PyModule_AddObject(m, "_interned_tuples", _interned_tuples);
    }
}

static void
setup_empty_tuple(PyObject *m)
{
    StaticTuple *stuple;

    if (_interned_tuples == NULL) {
        fprintf(stderr, "You need to call setup_interned_tuples() before"
            " setup_empty_tuple, because we intern it.\n");
    }
    // We need to create the empty tuple
    stuple = (StaticTuple *)StaticTuple_New(0);
    _empty_tuple = StaticTuple_Intern(stuple);
    assert(_empty_tuple == stuple);
    // At this point, refcnt is 2: 1 from New(), and 1 from the return from
    // intern(). We will keep 1 for the _empty_tuple global, and use the other
    // for the module reference.
    PyModule_AddObject(m, "_empty_tuple", (PyObject *)_empty_tuple);
}

static int
_StaticTuple_CheckExact(PyObject *obj)
{
    return StaticTuple_CheckExact(obj);
}

static void
setup_c_api(PyObject *m)
{
    _export_function(m, "StaticTuple_New", StaticTuple_New,
        "StaticTuple *(Py_ssize_t)");
    _export_function(m, "StaticTuple_Intern", StaticTuple_Intern,
        "StaticTuple *(StaticTuple *)");
    _export_function(m, "_StaticTuple_CheckExact", _StaticTuple_CheckExact,
        "int(PyObject *)");
}

PyMODINIT_FUNC
init_static_tuple_c(void)
{
    PyObject *m;

    if (PyType_Ready(&StaticTuple_Type) < 0)
        return;

    m = Py_InitModule3("_static_tuple_c", static_tuple_c_methods,
                       "C implementation of a StaticTuple structure");
    if (m == NULL)
        return;

    Py_INCREF(&StaticTuple_Type);
    PyModule_AddObject(m, "StaticTuple", (PyObject *)&StaticTuple_Type);
    if (import_bzrlib___simple_set_pyx() == -1) {
        // We failed to set up, stop early
        return;
    }
    setup_interned_tuples(m);
    setup_empty_tuple(m);
    setup_c_api(m);
}
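
/* Python-level smoke test of the module (illustrative):
 *     from bzrlib import _static_tuple_c
 *     st = _static_tuple_c.StaticTuple('foo', 'bar')
 *     assert st.as_tuple() == ('foo', 'bar')
 *     assert st == ('foo', 'bar')
 *     key = st.intern()
 *     assert key._is_interned()
 */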