Commit 7b65b4e7 authored by Tres Seaver

Merge py3k branch.

parents f70f5c92 bbd693c7
......@@ -6,3 +6,4 @@ nosetests.xml
coverage.xml
*.egg
docs/_build
__pycache__
......@@ -41,16 +41,17 @@
* a search finger weren't being used at all, but is still quadratic time
* in the number of buckets in the slice.
*/
typedef struct {
PyObject_HEAD
Bucket *firstbucket; /* First bucket */
Bucket *currentbucket; /* Current bucket (search finger) */
Bucket *lastbucket; /* Last bucket */
int currentoffset; /* Offset in currentbucket */
int pseudoindex; /* search finger index */
int first; /* Start offset in firstbucket */
int last; /* End offset in lastbucket */
char kind; /* 'k', 'v', 'i' */
typedef struct
{
PyObject_HEAD
Bucket *firstbucket; /* First bucket */
Bucket *currentbucket; /* Current bucket (search finger) */
Bucket *lastbucket; /* Last bucket */
int currentoffset; /* Offset in currentbucket */
int pseudoindex; /* search finger index */
int first; /* Start offset in firstbucket */
int last; /* End offset in lastbucket */
char kind; /* 'k', 'v', 'i' */
} BTreeItems;
#define ITEMS(O)((BTreeItems*)(O))
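As background for the seek code below: the "search finger" is just the (currentbucket, currentoffset, pseudoindex) triple, and a seek walks only the delta from the last sought position instead of rescanning from firstbucket. A minimal self-contained sketch of that idea, with invented names and fixed-size toy buckets (the real BTreeItems_seek walks variably sized persistent buckets and also honors the first/last bounds):

/* Toy finger over nbuckets equal-sized buckets; illustrative only. */
typedef struct {
    int nbuckets;
    int bucketlen;   /* slots per toy bucket */
    int curbucket;   /* finger: bucket index */
    int curoffset;   /* finger: offset within curbucket */
    int pseudoindex; /* finger: overall position last sought */
} ToyFinger;

static int
toy_seek(ToyFinger *f, int i)
{
    int delta = i - f->pseudoindex;
    while (delta > 0) {                            /* move right */
        int room = f->bucketlen - f->curoffset - 1;
        if (delta <= room) {
            f->curoffset += delta;
            break;
        }
        if (++f->curbucket >= f->nbuckets)
            return -1;                             /* past the end */
        delta -= room + 1;
        f->curoffset = 0;
    }
    while (delta < 0) {                            /* move left */
        if (-delta <= f->curoffset) {
            f->curoffset += delta;
            break;
        }
        delta += f->curoffset + 1;
        if (--f->curbucket < 0)
            return -1;                             /* before the start */
        f->curoffset = f->bucketlen - 1;
    }
    f->pseudoindex = i;
    return 0;
}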
......@@ -63,10 +64,10 @@ newBTreeItems(char kind,
static void
BTreeItems_dealloc(BTreeItems *self)
{
Py_XDECREF(self->firstbucket);
Py_XDECREF(self->lastbucket);
Py_XDECREF(self->currentbucket);
PyObject_DEL(self);
Py_XDECREF(self->firstbucket);
Py_XDECREF(self->lastbucket);
Py_XDECREF(self->currentbucket);
PyObject_DEL(self);
}
static Py_ssize_t
......@@ -77,33 +78,34 @@ BTreeItems_length_or_nonzero(BTreeItems *self, int nonzero)
b = self->firstbucket;
if (b == NULL)
return 0;
return 0;
r = self->last + 1 - self->first;
if (nonzero && r > 0)
/* Short-circuit if all we care about is nonempty */
return 1;
/* Short-circuit if all we care about is nonempty */
return 1;
if (b == self->lastbucket)
return r;
return r;
Py_INCREF(b);
PER_USE_OR_RETURN(b, -1);
while ((next = b->next)) {
r += b->len;
if (nonzero && r > 0)
/* Short-circuit if all we care about is nonempty */
break;
if (next == self->lastbucket)
break; /* we already counted the last bucket */
Py_INCREF(next);
PER_UNUSE(b);
Py_DECREF(b);
b = next;
PER_USE_OR_RETURN(b, -1);
while ((next = b->next))
{
r += b->len;
if (nonzero && r > 0)
/* Short-circuit if all we care about is nonempty */
break;
if (next == self->lastbucket)
break; /* we already counted the last bucket */
Py_INCREF(next);
PER_UNUSE(b);
Py_DECREF(b);
b = next;
PER_USE_OR_RETURN(b, -1);
}
PER_UNUSE(b);
Py_DECREF(b);
......@@ -114,7 +116,7 @@ BTreeItems_length_or_nonzero(BTreeItems *self, int nonzero)
static Py_ssize_t
BTreeItems_length(BTreeItems *self)
{
return BTreeItems_length_or_nonzero(self, 0);
return BTreeItems_length_or_nonzero(self, 0);
}
/*
......@@ -122,8 +124,8 @@ BTreeItems_length(BTreeItems *self)
**
** Find the ith position in the BTreeItems.
**
** Arguments: self The BTree
** i the index to seek to, in 0 .. len(self)-1, or in
** Arguments: self The BTree
** i the index to seek to, in 0 .. len(self)-1, or in
** -len(self) .. -1, as for indexing a Python sequence.
**
**
......@@ -141,10 +143,12 @@ BTreeItems_seek(BTreeItems *self, Py_ssize_t i)
pseudoindex = self->pseudoindex;
currentoffset = self->currentoffset;
currentbucket = self->currentbucket;
if (currentbucket == NULL) goto no_match;
if (currentbucket == NULL)
goto no_match;
delta = i - pseudoindex;
while (delta > 0) { /* move right */
while (delta > 0) /* move right */
{
int max;
/* Want to move right delta positions; the most we can move right in
* this bucket is currentbucket->len - currentoffset - 1 positions.
......@@ -153,34 +157,41 @@ BTreeItems_seek(BTreeItems *self, Py_ssize_t i)
max = currentbucket->len - currentoffset - 1;
b = currentbucket->next;
PER_UNUSE(currentbucket);
if (delta <= max) {
if (delta <= max)
{
currentoffset += delta;
pseudoindex += delta;
if (currentbucket == self->lastbucket
&& currentoffset > self->last) goto no_match;
&& currentoffset > self->last)
goto no_match;
break;
}
/* Move to start of next bucket. */
if (currentbucket == self->lastbucket || b == NULL) goto no_match;
if (currentbucket == self->lastbucket || b == NULL)
goto no_match;
currentbucket = b;
pseudoindex += max + 1;
delta -= max + 1;
currentoffset = 0;
}
while (delta < 0) { /* move left */
while (delta < 0) /* move left */
{
int status;
/* Want to move left -delta positions; the most we can move left in
* this bucket is currentoffset positions.
*/
if ((-delta) <= currentoffset) {
if ((-delta) <= currentoffset)
{
currentoffset += delta;
pseudoindex += delta;
if (currentbucket == self->firstbucket
&& currentoffset < self->first) goto no_match;
&& currentoffset < self->first)
goto no_match;
break;
}
/* Move to end of previous bucket. */
if (currentbucket == self->firstbucket) goto no_match;
if (currentbucket == self->firstbucket)
goto no_match;
status = PreviousBucket(&currentbucket, self->firstbucket);
if (status == 0)
goto no_match;
......@@ -202,10 +213,11 @@ BTreeItems_seek(BTreeItems *self, Py_ssize_t i)
PER_USE_OR_RETURN(currentbucket, -1);
error = currentoffset < 0 || currentoffset >= currentbucket->len;
PER_UNUSE(currentbucket);
if (error) {
PyErr_SetString(PyExc_RuntimeError,
"the bucket being iterated changed size");
return -1;
if (error)
{
PyErr_SetString(PyExc_RuntimeError,
"the bucket being iterated changed size");
return -1;
}
Py_INCREF(currentbucket);
......@@ -232,8 +244,8 @@ getBucketEntry(Bucket *b, int i, char kind)
assert(b);
assert(0 <= i && i < b->len);
switch (kind) {
switch (kind)
{
case 'k':
COPY_KEY_TO_OBJECT(result, b->keys[i]);
break;
......@@ -242,25 +254,30 @@ getBucketEntry(Bucket *b, int i, char kind)
COPY_VALUE_TO_OBJECT(result, b->values[i]);
break;
case 'i': {
case 'i':
{
PyObject *key;
PyObject *value;
COPY_KEY_TO_OBJECT(key, b->keys[i]);
if (!key) break;
if (!key)
break;
COPY_VALUE_TO_OBJECT(value, b->values[i]);
if (!value) {
if (!value)
{
Py_DECREF(key);
break;
}
result = PyTuple_New(2);
if (result) {
if (result)
{
PyTuple_SET_ITEM(result, 0, key);
PyTuple_SET_ITEM(result, 1, value);
}
else {
else
{
Py_DECREF(key);
Py_DECREF(value);
}
......@@ -278,18 +295,19 @@ getBucketEntry(Bucket *b, int i, char kind)
/*
** BTreeItems_item
**
** Arguments: self a BTreeItems structure
** i Which item to inspect
** Arguments: self a BTreeItems structure
** i Which item to inspect
**
** Returns: the BTreeItems_item_BTree of self->kind, i
** (ie pulls the ith item out)
** Returns: the BTreeItems_item_BTree of self->kind, i
** (ie pulls the ith item out)
*/
static PyObject *
BTreeItems_item(BTreeItems *self, Py_ssize_t i)
{
PyObject *result;
if (BTreeItems_seek(self, i) < 0) return NULL;
if (BTreeItems_seek(self, i) < 0)
return NULL;
PER_USE_OR_RETURN(self->currentbucket, NULL);
result = getBucketEntry(self->currentbucket, self->currentoffset,
......@@ -304,93 +322,151 @@ BTreeItems_item(BTreeItems *self, Py_ssize_t i)
** Creates a new BTreeItems structure representing the slice
** between the low and high range
**
** Arguments: self The old BTreeItems structure
** ilow The start index
** ihigh The end index
** Arguments: self The old BTreeItems structure
** ilow The start index
** ihigh The end index
**
** Returns: BTreeItems item
** Returns: BTreeItems item
*/
static PyObject *
BTreeItems_slice(BTreeItems *self, Py_ssize_t ilow, Py_ssize_t ihigh)
{
Bucket *lowbucket;
Bucket *highbucket;
int lowoffset;
int highoffset;
Py_ssize_t length = -1; /* len(self), but computed only if needed */
/* Complications:
* A Python slice never raises IndexError, but BTreeItems_seek does.
* Python did only part of index normalization before calling this:
* ilow may be < 0 now, and ihigh may be arbitrarily large. It's
* our responsibility to clip them.
* A Python slice is exclusive of the high index, but a BTreeItems
* struct is inclusive on both ends.
*/
/* First adjust ilow and ihigh to be legit endpoints in the Python
* sense (ilow inclusive, ihigh exclusive). This block duplicates the
* logic from Python's list_slice function (slicing for builtin lists).
*/
if (ilow < 0)
ilow = 0;
else {
if (length < 0)
length = BTreeItems_length(self);
if (ilow > length)
ilow = length;
}
if (ihigh < ilow)
ihigh = ilow;
else {
if (length < 0)
length = BTreeItems_length(self);
if (ihigh > length)
ihigh = length;
}
assert(0 <= ilow && ilow <= ihigh);
assert(length < 0 || ihigh <= length);
/* Now adjust for that our struct is inclusive on both ends. This is
* easy *except* when the slice is empty: there's no good way to spell
* that in an inclusive-on-both-ends scheme. For example, if the
* slice is btree.items([:0]), ilow == ihigh == 0 at this point, and if
* we were to subtract 1 from ihigh that would get interpreted by
* BTreeItems_seek as meaning the *entire* set of items. Setting ilow==1
* and ihigh==0 doesn't work either, as BTreeItems_seek raises IndexError
* if we attempt to seek to ilow==1 when the underlying sequence is empty.
* It seems simplest to deal with empty slices as a special case here.
*/
if (ilow == ihigh) {
/* empty slice */
lowbucket = highbucket = NULL;
lowoffset = 1;
highoffset = 0;
}
else {
assert(ilow < ihigh);
--ihigh; /* exclusive -> inclusive */
if (BTreeItems_seek(self, ilow) < 0) return NULL;
lowbucket = self->currentbucket;
lowoffset = self->currentoffset;
if (BTreeItems_seek(self, ihigh) < 0) return NULL;
highbucket = self->currentbucket;
highoffset = self->currentoffset;
}
return newBTreeItems(self->kind,
lowbucket, lowoffset, highbucket, highoffset);
Bucket *lowbucket;
Bucket *highbucket;
int lowoffset;
int highoffset;
Py_ssize_t length = -1; /* len(self), but computed only if needed */
/* Complications:
* A Python slice never raises IndexError, but BTreeItems_seek does.
* Python did only part of index normalization before calling this:
* ilow may be < 0 now, and ihigh may be arbitrarily large. It's
* our responsibility to clip them.
* A Python slice is exclusive of the high index, but a BTreeItems
* struct is inclusive on both ends.
*/
/* First adjust ilow and ihigh to be legit endpoints in the Python
* sense (ilow inclusive, ihigh exclusive). This block duplicates the
* logic from Python's list_slice function (slicing for builtin lists).
*/
if (ilow < 0)
ilow = 0;
else
{
if (length < 0)
length = BTreeItems_length(self);
if (ilow > length)
ilow = length;
}
if (ihigh < ilow)
ihigh = ilow;
else
{
if (length < 0)
length = BTreeItems_length(self);
if (ihigh > length)
ihigh = length;
}
assert(0 <= ilow && ilow <= ihigh);
assert(length < 0 || ihigh <= length);
/* Now adjust for that our struct is inclusive on both ends. This is
* easy *except* when the slice is empty: there's no good way to spell
* that in an inclusive-on-both-ends scheme. For example, if the
* slice is btree.items([:0]), ilow == ihigh == 0 at this point, and if
* we were to subtract 1 from ihigh that would get interpreted by
* BTreeItems_seek as meaning the *entire* set of items. Setting ilow==1
* and ihigh==0 doesn't work either, as BTreeItems_seek raises IndexError
* if we attempt to seek to ilow==1 when the underlying sequence is empty.
* It seems simplest to deal with empty slices as a special case here.
*/
if (ilow == ihigh) /* empty slice */
{
lowbucket = highbucket = NULL;
lowoffset = 1;
highoffset = 0;
}
else
{
assert(ilow < ihigh);
--ihigh; /* exclusive -> inclusive */
if (BTreeItems_seek(self, ilow) < 0)
return NULL;
lowbucket = self->currentbucket;
lowoffset = self->currentoffset;
if (BTreeItems_seek(self, ihigh) < 0)
return NULL;
highbucket = self->currentbucket;
highoffset = self->currentoffset;
}
return newBTreeItems(self->kind,
lowbucket, lowoffset, highbucket, highoffset);
}
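Condensing the clipping rules described in the comments above into a hypothetical helper (not part of the diff; BTreeItems_slice inlines this logic and computes length lazily): ilow is clamped to [0, length] and ihigh to [ilow, length], exactly as list_slice does for builtin lists, and the empty result then has to be spelled with the lowoffset=1 / highoffset=0 sentinel because the struct is inclusive on both ends.

/* Illustrative only. */
static void
clip_slice(Py_ssize_t length, Py_ssize_t *ilow, Py_ssize_t *ihigh)
{
    if (*ilow < 0)
        *ilow = 0;
    else if (*ilow > length)
        *ilow = length;
    if (*ihigh < *ilow)
        *ihigh = *ilow;
    else if (*ihigh > length)
        *ihigh = length;
}

/* With length == 5:  [-3, 100) clips to [0, 5);  [4, 2) clips to [4, 4),
 * an empty slice, which becomes firstbucket == lastbucket == NULL with
 * first = 1, last = 0 in the resulting BTreeItems. */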
static PyObject *
BTreeItems_subscript(BTreeItems *self, PyObject* subscript)
{
Py_ssize_t len = BTreeItems_length_or_nonzero(self, 0);
if (PyIndex_Check(subscript))
{
Py_ssize_t i = PyNumber_AsSsize_t(subscript, PyExc_IndexError);
if (i == -1 && PyErr_Occurred())
return NULL;
if (i < 0)
i += len;
return BTreeItems_item(self, i);
}
if (PySlice_Check(subscript))
{
Py_ssize_t start, stop, step, slicelength;
#ifdef PY3K
#define SLICEOBJ(x) (x)
#else
#define SLICEOBJ(x) (PySliceObject*)(x)
#endif
if (PySlice_GetIndicesEx(SLICEOBJ(subscript), len,
&start, &stop, &step, &slicelength) < 0)
{
return NULL;
}
if (step != 1)
{
PyErr_SetString(PyExc_RuntimeError,
"slices must have step size of 1");
return NULL;
}
return BTreeItems_slice(self, start, stop);
}
PyErr_SetString(PyExc_RuntimeError,
"Unknown index type: must be int or slice");
return NULL;
}
static PySequenceMethods BTreeItems_as_sequence = {
(lenfunc) BTreeItems_length,
(binaryfunc)0,
(ssizeargfunc)0,
(ssizeargfunc) BTreeItems_item,
(ssizessizeargfunc) BTreeItems_slice,
/* Py3K doesn't honor sequence slicing, so implement via mapping */
static PyMappingMethods BTreeItems_as_mapping = {
(lenfunc)BTreeItems_length, /* mp_length */
(binaryfunc)BTreeItems_subscript, /* mp_subscript */
};
static PySequenceMethods BTreeItems_as_sequence =
{
(lenfunc) BTreeItems_length, /* sq_length */
(binaryfunc)0, /* sq_concat */
(ssizeargfunc)0, /* sq_repeat */
(ssizeargfunc) BTreeItems_item, /* sq_item */
#ifndef PY3K
/* Py3K doesn't honor this slot */
(ssizessizeargfunc) BTreeItems_slice, /* sq_slice */
#endif
};
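As a side note, on compilers that accept C99 designated initializers the same slot tables could be written with named fields; this is only an alternative style for comparison, not what the diff does (positional initializers are likely kept because older MSVC releases used for Windows builds lack this feature). The _alt names are invented for illustration:

static PyMappingMethods BTreeItems_as_mapping_alt = {
    .mp_length = (lenfunc)BTreeItems_length,
    .mp_subscript = (binaryfunc)BTreeItems_subscript,
};

static PySequenceMethods BTreeItems_as_sequence_alt = {
    .sq_length = (lenfunc)BTreeItems_length,
    .sq_item = (ssizeargfunc)BTreeItems_item,
#ifndef PY3K
    .sq_slice = (ssizessizeargfunc)BTreeItems_slice,
#endif
};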
/* Number Method items (just for nb_nonzero!) */
......@@ -398,38 +474,48 @@ static PySequenceMethods BTreeItems_as_sequence = {
static int
BTreeItems_nonzero(BTreeItems *self)
{
return BTreeItems_length_or_nonzero(self, 1);
return BTreeItems_length_or_nonzero(self, 1);
}
static PyNumberMethods BTreeItems_as_number_for_nonzero = {
0,0,0,0,0,0,0,0,0,0,
(inquiry)BTreeItems_nonzero};
0, /* nb_add */
0, /* nb_subtract */
0, /* nb_multiply */
#ifndef PY3K
0, /* nb_divide */
#endif
0, /* nb_remainder */
0, /* nb_divmod */
0, /* nb_power */
0, /* nb_negative */
0, /* nb_positive */
0, /* nb_absolute */
(inquiry)BTreeItems_nonzero /* nb_nonzero */
};
static PyTypeObject BTreeItemsType = {
PyObject_HEAD_INIT(NULL)
0, /*ob_size*/
MOD_NAME_PREFIX "BTreeItems", /*tp_name*/
sizeof(BTreeItems), /*tp_basicsize*/
0, /*tp_itemsize*/
/* methods */
(destructor) BTreeItems_dealloc, /*tp_dealloc*/
(printfunc)0, /*tp_print*/
(getattrfunc)0, /*obsolete tp_getattr*/
(setattrfunc)0, /*obsolete tp_setattr*/
(cmpfunc)0, /*tp_compare*/
(reprfunc)0, /*tp_repr*/
&BTreeItems_as_number_for_nonzero, /*tp_as_number*/
&BTreeItems_as_sequence, /*tp_as_sequence*/
0, /*tp_as_mapping*/
(hashfunc)0, /*tp_hash*/
(ternaryfunc)0, /*tp_call*/
(reprfunc)0, /*tp_str*/
0, /*tp_getattro*/
0, /*tp_setattro*/
/* Space for future expansion */
0L,0L,
"Sequence type used to iterate over BTree items." /* Documentation string */
PyVarObject_HEAD_INIT(NULL, 0)
MOD_NAME_PREFIX "BTreeItems", /* tp_name */
sizeof(BTreeItems), /* tp_basicsize */
0, /* tp_itemsize */
/* methods */
(destructor) BTreeItems_dealloc, /* tp_dealloc */
0, /* tp_print */
0, /* obsolete tp_getattr */
0, /* obsolete tp_setattr */
0, /* tp_compare */
0, /* tp_repr */
&BTreeItems_as_number_for_nonzero, /* tp_as_number */
&BTreeItems_as_sequence, /* tp_as_sequence */
&BTreeItems_as_mapping, /* tp_as_mapping */
(hashfunc)0, /* tp_hash */
(ternaryfunc)0, /* tp_call */
(reprfunc)0, /* tp_str */
0, /* tp_getattro */
0, /* tp_setattro */
/* Space for future expansion */
0L,0L,
"Sequence type used to iterate over BTree items." /* Documentation string */
};
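The switch from PyObject_HEAD_INIT(NULL) plus an explicit 0 for ob_size to PyVarObject_HEAD_INIT(NULL, 0) is mechanical; in CPython's object.h the macro expands to roughly the following, which is why the separate ob_size line disappears and why this spelling works for static types under both Python 2 and 3 (sketch from the CPython headers, not from this diff):

/* Approximate expansion, per CPython's object.h: */
#define PyVarObject_HEAD_INIT(type, size) \
    PyObject_HEAD_INIT(type) size,

/* so PyVarObject_HEAD_INIT(NULL, 0) supplies both the object header and
 * the ob_size field in one go; it is the spelling the Python 3 headers
 * recommend for statically allocated type objects. */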
/* Returns a new BTreeItems object representing the contiguous slice from
......@@ -443,120 +529,121 @@ newBTreeItems(char kind,
Bucket *lowbucket, int lowoffset,
Bucket *highbucket, int highoffset)
{
BTreeItems *self;
BTreeItems *self;
UNLESS (self = PyObject_NEW(BTreeItems, &BTreeItemsType)) return NULL;
self->kind=kind;
UNLESS (self = PyObject_NEW(BTreeItems, &BTreeItemsType))
return NULL;
self->kind=kind;
self->first=lowoffset;
self->last=highoffset;
self->first=lowoffset;
self->last=highoffset;
if (! lowbucket || ! highbucket
|| (lowbucket == highbucket && lowoffset > highoffset))
if (! lowbucket || ! highbucket
|| (lowbucket == highbucket && lowoffset > highoffset))
{
self->firstbucket = 0;
self->lastbucket = 0;
self->currentbucket = 0;
self->firstbucket = 0;
self->lastbucket = 0;
self->currentbucket = 0;
}
else
else
{
Py_INCREF(lowbucket);
self->firstbucket = lowbucket;
Py_INCREF(highbucket);
self->lastbucket = highbucket;
Py_INCREF(lowbucket);
self->currentbucket = lowbucket;
Py_INCREF(lowbucket);
self->firstbucket = lowbucket;
Py_INCREF(highbucket);
self->lastbucket = highbucket;
Py_INCREF(lowbucket);
self->currentbucket = lowbucket;
}
self->currentoffset = lowoffset;
self->pseudoindex = 0;
self->currentoffset = lowoffset;
self->pseudoindex = 0;
return OBJECT(self);
return OBJECT(self);
}
static int
nextBTreeItems(SetIteration *i)
{
if (i->position >= 0)
if (i->position >= 0)
{
if (i->position)
if (i->position)
{
DECREF_KEY(i->key);
DECREF_VALUE(i->value);
DECREF_KEY(i->key);
DECREF_VALUE(i->value);
}
if (BTreeItems_seek(ITEMS(i->set), i->position) >= 0)
if (BTreeItems_seek(ITEMS(i->set), i->position) >= 0)
{
Bucket *currentbucket;
Bucket *currentbucket;
currentbucket = BUCKET(ITEMS(i->set)->currentbucket);
UNLESS(PER_USE(currentbucket))
currentbucket = BUCKET(ITEMS(i->set)->currentbucket);
UNLESS(PER_USE(currentbucket))
{
/* Mark iteration terminated, so that finiSetIteration doesn't
* try to redundantly decref the key and value
*/
i->position = -1;
return -1;
/* Mark iteration terminated, so that finiSetIteration doesn't
* try to redundantly decref the key and value
*/
i->position = -1;
return -1;
}
COPY_KEY(i->key, currentbucket->keys[ITEMS(i->set)->currentoffset]);
INCREF_KEY(i->key);
COPY_KEY(i->key, currentbucket->keys[ITEMS(i->set)->currentoffset]);
INCREF_KEY(i->key);
COPY_VALUE(i->value,
currentbucket->values[ITEMS(i->set)->currentoffset]);
INCREF_VALUE(i->value);
COPY_VALUE(i->value,
currentbucket->values[ITEMS(i->set)->currentoffset]);
INCREF_VALUE(i->value);
i->position ++;
i->position ++;
PER_UNUSE(currentbucket);
PER_UNUSE(currentbucket);
}
else
else
{
i->position = -1;
PyErr_Clear();
i->position = -1;
PyErr_Clear();
}
}
return 0;
return 0;
}
static int
nextTreeSetItems(SetIteration *i)
{
if (i->position >= 0)
if (i->position >= 0)
{
if (i->position)
if (i->position)
{
DECREF_KEY(i->key);
DECREF_KEY(i->key);
}
if (BTreeItems_seek(ITEMS(i->set), i->position) >= 0)
if (BTreeItems_seek(ITEMS(i->set), i->position) >= 0)
{
Bucket *currentbucket;
Bucket *currentbucket;
currentbucket = BUCKET(ITEMS(i->set)->currentbucket);
UNLESS(PER_USE(currentbucket))
currentbucket = BUCKET(ITEMS(i->set)->currentbucket);
UNLESS(PER_USE(currentbucket))
{
/* Mark iteration terminated, so that finiSetIteration doesn't
* try to redundantly decref the key and value
*/
i->position = -1;
return -1;
/* Mark iteration terminated, so that finiSetIteration doesn't
* try to redundantly decref the key and value
*/
i->position = -1;
return -1;
}
COPY_KEY(i->key, currentbucket->keys[ITEMS(i->set)->currentoffset]);
INCREF_KEY(i->key);
COPY_KEY(i->key, currentbucket->keys[ITEMS(i->set)->currentoffset]);
INCREF_KEY(i->key);
i->position ++;
i->position ++;
PER_UNUSE(currentbucket);
PER_UNUSE(currentbucket);
}
else
else
{
i->position = -1;
PyErr_Clear();
i->position = -1;
PyErr_Clear();
}
}
return 0;
return 0;
}
/* Support for the iteration protocol new in Python 2.2. */
......@@ -564,7 +651,8 @@ nextTreeSetItems(SetIteration *i)
static PyTypeObject BTreeIter_Type;
/* The type of iterator objects, returned by e.g. iter(IIBTree()). */
typedef struct {
typedef struct
{
PyObject_HEAD
/* We use a BTreeItems object because it's convenient and flexible.
* We abuse it two ways:
......@@ -584,7 +672,8 @@ BTreeIter_new(BTreeItems *pitems)
assert(pitems != NULL);
result = PyObject_New(BTreeIter, &BTreeIter_Type);
if (result) {
if (result)
{
Py_INCREF(pitems);
result->pitems = pitems;
}
......@@ -595,8 +684,8 @@ BTreeIter_new(BTreeItems *pitems)
static void
BTreeIter_dealloc(BTreeIter *bi)
{
Py_DECREF(bi->pitems);
PyObject_Del(bi);
Py_DECREF(bi->pitems);
PyObject_Del(bi);
}
/* The implementation of the iterator's tp_iternext slot. Returns "the next"
......@@ -606,45 +695,49 @@ BTreeIter_dealloc(BTreeIter *bi)
static PyObject *
BTreeIter_next(BTreeIter *bi, PyObject *args)
{
PyObject *result = NULL; /* until proven innocent */
BTreeItems *items = bi->pitems;
int i = items->currentoffset;
Bucket *bucket = items->currentbucket;
if (bucket == NULL) /* iteration termination is sticky */
return NULL;
PER_USE_OR_RETURN(bucket, NULL);
if (i >= bucket->len) {
/* We never leave this routine normally with i >= len: somebody
* else mutated the current bucket.
*/
PyErr_SetString(PyExc_RuntimeError,
"the bucket being iterated changed size");
/* Arrange for that this error is sticky too. */
items->currentoffset = INT_MAX;
goto Done;
}
/* Build the result object, from bucket at offset i. */
result = getBucketEntry(bucket, i, items->kind);
/* Advance position for next call. */
if (bucket == items->lastbucket && i >= items->last) {
/* Next call should terminate the iteration. */
Py_DECREF(items->currentbucket);
items->currentbucket = NULL;
}
else {
++i;
if (i >= bucket->len) {
Py_XINCREF(bucket->next);
items->currentbucket = bucket->next;
Py_DECREF(bucket);
i = 0;
}
items->currentoffset = i;
PyObject *result = NULL; /* until proven innocent */
BTreeItems *items = bi->pitems;
int i = items->currentoffset;
Bucket *bucket = items->currentbucket;
if (bucket == NULL) /* iteration termination is sticky */
return NULL;
PER_USE_OR_RETURN(bucket, NULL);
if (i >= bucket->len)
{
/* We never leave this routine normally with i >= len: somebody
* else mutated the current bucket.
*/
PyErr_SetString(PyExc_RuntimeError,
"the bucket being iterated changed size");
/* Arrange for that this error is sticky too. */
items->currentoffset = INT_MAX;
goto Done;
}
/* Build the result object, from bucket at offset i. */
result = getBucketEntry(bucket, i, items->kind);
/* Advance position for next call. */
if (bucket == items->lastbucket && i >= items->last)
{
/* Next call should terminate the iteration. */
Py_DECREF(items->currentbucket);
items->currentbucket = NULL;
}
else
{
++i;
if (i >= bucket->len)
{
Py_XINCREF(bucket->next);
items->currentbucket = bucket->next;
Py_DECREF(bucket);
i = 0;
}
items->currentoffset = i;
}
Done:
PER_UNUSE(bucket);
......@@ -659,40 +752,39 @@ BTreeIter_getiter(PyObject *it)
}
static PyTypeObject BTreeIter_Type = {
PyObject_HEAD_INIT(NULL)
0, /* ob_size */
MOD_NAME_PREFIX "-iterator", /* tp_name */
sizeof(BTreeIter), /* tp_basicsize */
0, /* tp_itemsize */
/* methods */
(destructor)BTreeIter_dealloc, /* tp_dealloc */
0, /* tp_print */
0, /* tp_getattr */
0, /* tp_setattr */
0, /* tp_compare */
0, /* tp_repr */
0, /* tp_as_number */
0, /* tp_as_sequence */
0, /* tp_as_mapping */
0, /* tp_hash */
0, /* tp_call */
0, /* tp_str */
0, /*PyObject_GenericGetAttr,*/ /* tp_getattro */
0, /* tp_setattro */
0, /* tp_as_buffer */
Py_TPFLAGS_DEFAULT, /* tp_flags */
0, /* tp_doc */
0, /* tp_traverse */
0, /* tp_clear */
0, /* tp_richcompare */
0, /* tp_weaklistoffset */
(getiterfunc)BTreeIter_getiter, /* tp_iter */
(iternextfunc)BTreeIter_next, /* tp_iternext */
0, /* tp_methods */
0, /* tp_members */
0, /* tp_getset */
0, /* tp_base */
0, /* tp_dict */
0, /* tp_descr_get */
0, /* tp_descr_set */
PyVarObject_HEAD_INIT(NULL, 0)
MOD_NAME_PREFIX "-iterator", /* tp_name */
sizeof(BTreeIter), /* tp_basicsize */
0, /* tp_itemsize */
/* methods */
(destructor)BTreeIter_dealloc, /* tp_dealloc */
0, /* tp_print */
0, /* tp_getattr */
0, /* tp_setattr */
0, /* tp_compare */
0, /* tp_repr */
0, /* tp_as_number */
0, /* tp_as_sequence */
0, /* tp_as_mapping */
0, /* tp_hash */
0, /* tp_call */
0, /* tp_str */
0, /*PyObject_GenericGetAttr,*/ /* tp_getattro */
0, /* tp_setattro */
0, /* tp_as_buffer */
Py_TPFLAGS_DEFAULT, /* tp_flags */
0, /* tp_doc */
0, /* tp_traverse */
0, /* tp_clear */
0, /* tp_richcompare */
0, /* tp_weaklistoffset */
(getiterfunc)BTreeIter_getiter, /* tp_iter */
(iternextfunc)BTreeIter_next, /* tp_iternext */
0, /* tp_methods */
0, /* tp_members */
0, /* tp_getset */
0, /* tp_base */
0, /* tp_dict */
0, /* tp_descr_get */
0, /* tp_descr_set */
};
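For reference, a minimal sketch (illustrative name, not part of the diff) of how the tp_iter / tp_iternext slots wired up above are consumed from C; a Python for-loop over an IIBTree does the equivalent internally:

#include <Python.h>

/* Drain any iterable using the same protocol BTreeIter_Type implements.
 * Returns 0 on success, -1 with an exception set on error. */
static int
drain_iterable(PyObject *iterable)
{
    PyObject *iter = PyObject_GetIter(iterable);   /* calls tp_iter */
    PyObject *item;
    if (iter == NULL)
        return -1;
    while ((item = PyIter_Next(iter)) != NULL) {   /* calls tp_iternext */
        /* ... use item ... */
        Py_DECREF(item);
    }
    Py_DECREF(iter);
    return PyErr_Occurred() ? -1 : 0;
}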
......@@ -15,6 +15,7 @@
#include "Python.h"
/* include structmember.h for offsetof */
#include "structmember.h"
#include "bytesobject.h"
#ifdef PERSISTENT
#include "persistent/cPersistence.h"
......@@ -27,7 +28,7 @@
#define PER_ACCESSED(O) 1
#endif
#include "py24compat.h"
#include "_compat.h"
/* So sue me. This pair gets used all over the place, so much so that it
* interferes with understanding non-persistence parts of algorithms.
......@@ -65,7 +66,7 @@ static void PyVar_Assign(PyObject **v, PyObject *e) { Py_XDECREF(*v); *v=e;}
#define MAX_BTREE_SIZE(B) DEFAULT_MAX_BTREE_SIZE
#define MAX_BUCKET_SIZE(B) DEFAULT_MAX_BUCKET_SIZE
#define SameType_Check(O1, O2) ((O1)->ob_type==(O2)->ob_type)
#define SameType_Check(O1, O2) (Py_TYPE((O1))==Py_TYPE((O2)))
#define ASSERT(C, S, R) if (! (C)) { \
PyErr_SetString(PyExc_AssertionError, (S)); return (R); }
......@@ -81,7 +82,7 @@ static void PyVar_Assign(PyObject **v, PyObject *e) { Py_XDECREF(*v); *v=e;}
static int
longlong_check(PyObject *ob)
{
if (PyInt_Check(ob))
if (INT_CHECK(ob))
return 1;
if (PyLong_Check(ob)) {
......@@ -101,10 +102,52 @@ longlong_as_object(PY_LONG_LONG val)
static PY_LONG_LONG maxint = 0;
if (maxint == 0)
maxint = PyInt_GetMax();
maxint = INT_GETMAX();
if ((val > maxint) || (val < (-maxint-1)))
return PyLong_FromLongLong(val);
return PyInt_FromLong((long)val);
return INT_FROM_LONG((long)val);
}
#endif
#ifdef NEED_LONG_LONG_KEYS
static int
longlong_convert(PyObject *ob, PY_LONG_LONG *value)
{
#ifndef PY3K
if (PyInt_Check(ob))
{
(*value) = (PY_LONG_LONG)PyInt_AS_LONG(ob);
return 1;
}
#endif
if (!PyLong_Check(ob))
{
PyErr_SetString(PyExc_TypeError, "expected integer key");
return 0;
}
else
{
PY_LONG_LONG val;
#if PY_VERSION_HEX < 0x02070000
/* check magnitude */
val = PyLong_AsLongLong(ob);
if (val == -1 && PyErr_Occurred())
goto overflow;
#else
int overflow;
val = PyLong_AsLongLongAndOverflow(ob, &overflow);
if (overflow)
goto overflow;
#endif
(*value) = val;
return 1;
}
overflow:
PyErr_SetString(PyExc_ValueError, "long integer out of range");
return 0;
}
#endif
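The INT_CHECK / INT_FROM_LONG / INT_GETMAX / INTERN names come from the new _compat.h header, which is not included in this excerpt; presumably they paper over the PyInt/PyLong and PyString/PyUnicode split along these lines (a hypothetical reconstruction, not quoted from the real header):

/* Hypothetical _compat.h definitions, for illustration only: */
#ifdef PY3K
#define INT_CHECK(ob)     PyLong_Check(ob)
#define INT_FROM_LONG(l)  PyLong_FromLong(l)
#define INT_GETMAX()      LONG_MAX              /* e.g.; no PyInt on Py3 */
#define INTERN(s)         PyUnicode_InternFromString(s)
#else
#define INT_CHECK(ob)     PyInt_Check(ob)
#define INT_FROM_LONG(l)  PyInt_FromLong(l)
#define INT_GETMAX()      PyInt_GetMax()
#define INTERN(s)         PyString_InternFromString(s)
#endif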
......@@ -291,7 +334,7 @@ IndexError(int i)
{
PyObject *v;
v = PyInt_FromLong(i);
v = INT_FROM_LONG(i);
if (!v) {
v = Py_None;
Py_INCREF(v);
......@@ -451,7 +494,11 @@ BTREEITEMSTEMPLATE_C
int
init_persist_type(PyTypeObject *type)
{
#ifdef PY3K
((PyObject*)type)->ob_type = &PyType_Type;
#else
type->ob_type = &PyType_Type;
#endif
type->tp_base = cPersistenceCAPI->pertype;
if (PyType_Ready(type) < 0)
......@@ -460,120 +507,167 @@ init_persist_type(PyTypeObject *type)
return 1;
}
void
INITMODULE (void)
#ifdef PY3K
static struct PyModuleDef moduledef = {
PyModuleDef_HEAD_INIT,
"_" MOD_NAME_PREFIX "BTree", /* m_name */
BTree_module_documentation, /* m_doc */
-1, /* m_size */
module_methods, /* m_methods */
NULL, /* m_reload */
NULL, /* m_traverse */
NULL, /* m_clear */
NULL, /* m_free */
};
#endif
static PyObject*
module_init(void)
{
PyObject *m, *d, *c;
PyObject *module, *mod_dict, *interfaces, *conflicterr;
#ifdef KEY_TYPE_IS_PYOBJECT
object_ = PyTuple_GetItem(Py_None->ob_type->tp_bases, 0);
object_ = PyTuple_GetItem(Py_TYPE(Py_None)->tp_bases, 0);
if (object_ == NULL)
return;
return NULL;
#endif
sort_str = PyString_InternFromString("sort");
sort_str = INTERN("sort");
if (!sort_str)
return;
reverse_str = PyString_InternFromString("reverse");
return NULL;
reverse_str = INTERN("reverse");
if (!reverse_str)
return;
__setstate___str = PyString_InternFromString("__setstate__");
return NULL;
__setstate___str = INTERN("__setstate__");
if (!__setstate___str)
return;
_bucket_type_str = PyString_InternFromString("_bucket_type");
return NULL;
_bucket_type_str = INTERN("_bucket_type");
if (!_bucket_type_str)
return;
return NULL;
/* Grab the ConflictError class */
m = PyImport_ImportModule("BTrees.Interfaces");
if (m != NULL) {
c = PyObject_GetAttrString(m, "BTreesConflictError");
if (c != NULL)
ConflictError = c;
Py_DECREF(m);
interfaces = PyImport_ImportModule("BTrees.Interfaces");
if (interfaces != NULL)
{
conflicterr = PyObject_GetAttrString(interfaces, "BTreesConflictError");
if (conflicterr != NULL)
ConflictError = conflicterr;
Py_DECREF(interfaces);
}
if (ConflictError == NULL) {
Py_INCREF(PyExc_ValueError);
ConflictError=PyExc_ValueError;
if (ConflictError == NULL)
{
Py_INCREF(PyExc_ValueError);
ConflictError=PyExc_ValueError;
}
/* Initialize the PyPersist_C_API and the type objects. */
cPersistenceCAPI = PyCObject_Import("persistent.cPersistence", "CAPI");
#ifdef PY3K
cPersistenceCAPI = (cPersistenceCAPIstruct *)PyCapsule_Import(
"persistent.cPersistence.CAPI", 0);
#else
cPersistenceCAPI = (cPersistenceCAPIstruct *)PyCObject_Import(
"persistent.cPersistence", "CAPI");
#endif
if (cPersistenceCAPI == NULL)
return;
return NULL;
BTreeItemsType.ob_type = &PyType_Type;
BTreeIter_Type.ob_type = &PyType_Type;
#ifdef PY3K
#define _SET_TYPE(typ) ((PyObject*)(&typ))->ob_type = &PyType_Type
#else
#define _SET_TYPE(typ) (typ).ob_type = &PyType_Type
#endif
_SET_TYPE(BTreeItemsType);
_SET_TYPE(BTreeIter_Type);
BTreeIter_Type.tp_getattro = PyObject_GenericGetAttr;
BucketType.tp_new = PyType_GenericNew;
SetType.tp_new = PyType_GenericNew;
BTreeType.tp_new = PyType_GenericNew;
TreeSetType.tp_new = PyType_GenericNew;
if (!init_persist_type(&BucketType))
return;
return NULL;
if (!init_persist_type(&BTreeType))
return;
return NULL;
if (!init_persist_type(&SetType))
return;
return NULL;
if (!init_persist_type(&TreeSetType))
return;
return NULL;
if (PyDict_SetItem(BTreeType.tp_dict, _bucket_type_str,
(PyObject *)&BucketType) < 0) {
fprintf(stderr, "btree failed\n");
return;
(PyObject *)&BucketType) < 0)
{
fprintf(stderr, "btree failed\n");
return NULL;
}
if (PyDict_SetItem(TreeSetType.tp_dict, _bucket_type_str,
(PyObject *)&SetType) < 0) {
fprintf(stderr, "bucket failed\n");
return;
(PyObject *)&SetType) < 0)
{
fprintf(stderr, "bucket failed\n");
return NULL;
}
/* Create the module and add the functions */
m = Py_InitModule4("_" MOD_NAME_PREFIX "BTree",
#ifdef PY3K
module = PyModule_Create(&moduledef);
#else
module = Py_InitModule4("_" MOD_NAME_PREFIX "BTree",
module_methods, BTree_module_documentation,
(PyObject *)NULL, PYTHON_API_VERSION);
#endif
/* Add some symbolic constants to the module */
d = PyModule_GetDict(m);
if (PyDict_SetItemString(d, MOD_NAME_PREFIX "Bucket",
mod_dict = PyModule_GetDict(module);
if (PyDict_SetItemString(mod_dict, MOD_NAME_PREFIX "Bucket",
(PyObject *)&BucketType) < 0)
return;
if (PyDict_SetItemString(d, MOD_NAME_PREFIX "BTree",
return NULL;
if (PyDict_SetItemString(mod_dict, MOD_NAME_PREFIX "BTree",
(PyObject *)&BTreeType) < 0)
return;
if (PyDict_SetItemString(d, MOD_NAME_PREFIX "Set",
return NULL;
if (PyDict_SetItemString(mod_dict, MOD_NAME_PREFIX "Set",
(PyObject *)&SetType) < 0)
return;
if (PyDict_SetItemString(d, MOD_NAME_PREFIX "TreeSet",
return NULL;
if (PyDict_SetItemString(mod_dict, MOD_NAME_PREFIX "TreeSet",
(PyObject *)&TreeSetType) < 0)
return;
if (PyDict_SetItemString(d, MOD_NAME_PREFIX "TreeIterator",
return NULL;
if (PyDict_SetItemString(mod_dict, MOD_NAME_PREFIX "TreeIterator",
(PyObject *)&BTreeIter_Type) < 0)
return;
return NULL;
/* We also want to be able to access these constants without the prefix
* so that code can more easily exchange modules (particularly the integer
* and long modules, but also others). The TreeIterator is only internal,
* so we don't bother to expose that.
*/
if (PyDict_SetItemString(d, "Bucket",
if (PyDict_SetItemString(mod_dict, "Bucket",
(PyObject *)&BucketType) < 0)
return;
if (PyDict_SetItemString(d, "BTree",
return NULL;
if (PyDict_SetItemString(mod_dict, "BTree",
(PyObject *)&BTreeType) < 0)
return;
if (PyDict_SetItemString(d, "Set",
return NULL;
if (PyDict_SetItemString(mod_dict, "Set",
(PyObject *)&SetType) < 0)
return;
if (PyDict_SetItemString(d, "TreeSet",
return NULL;
if (PyDict_SetItemString(mod_dict, "TreeSet",
(PyObject *)&TreeSetType) < 0)
return;
return NULL;
#if defined(ZODB_64BIT_INTS) && defined(NEED_LONG_LONG_SUPPORT)
if (PyDict_SetItemString(d, "using64bits", Py_True) < 0)
return;
if (PyDict_SetItemString(mod_dict, "using64bits", Py_True) < 0)
return NULL;
#else
if (PyDict_SetItemString(d, "using64bits", Py_False) < 0)
return;
if (PyDict_SetItemString(mod_dict, "using64bits", Py_False) < 0)
return NULL;
#endif
return module;
}
#ifdef PY3K
PyMODINIT_FUNC INITMODULE(void)
{
return module_init();
}
#else
PyMODINIT_FUNC INITMODULE(void)
{
module_init();
}
#endif
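INITMODULE itself is defined in the per-type module sources, not shown here; the pattern above relies on it expanding to the right entry-point name for each build, presumably along these lines (an assumption, with _IIBTree only as an example):

/* Hypothetical definition in a per-type module source file: */
#ifdef PY3K
#define INITMODULE PyInit__IIBTree    /* for the "_IIBTree" module, say */
#else
#define INITMODULE init_IIBTree
#endif

/* Under Py3K the entry point must return the module object created by
 * PyModule_Create(); under Python 2, Py_InitModule4() registers the
 * module as a side effect and the entry point returns nothing, which is
 * why module_init()'s result is simply dropped in that branch. */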
......@@ -54,9 +54,9 @@
** Search a bucket for a given key.
**
** Arguments
** self The bucket
** keyarg The key to look for
** has_key Boolean; if true, return a true/false result; else return
** self The bucket
** keyarg The key to look for
** has_key Boolean; if true, return a true/false result; else return
** the value associated with the key.
**
** Return
......@@ -75,37 +75,38 @@
static PyObject *
_bucket_get(Bucket *self, PyObject *keyarg, int has_key)
{
int i, cmp;
KEY_TYPE key;
PyObject *r = NULL;
int copied = 1;
int i, cmp;
KEY_TYPE key;
PyObject *r = NULL;
int copied = 1;
COPY_KEY_FROM_ARG(key, keyarg, copied);
UNLESS (copied) return NULL;
COPY_KEY_FROM_ARG(key, keyarg, copied);
UNLESS (copied) return NULL;
UNLESS (PER_USE(self)) return NULL;
UNLESS (PER_USE(self)) return NULL;
BUCKET_SEARCH(i, cmp, self, key, goto Done);
if (has_key)
r = PyInt_FromLong(cmp ? 0 : has_key);
else {
if (cmp == 0) {
COPY_VALUE_TO_OBJECT(r, self->values[i]);
}
BUCKET_SEARCH(i, cmp, self, key, goto Done);
if (has_key)
r = INT_FROM_LONG(cmp ? 0 : has_key);
else
PyErr_SetObject(PyExc_KeyError, keyarg);
}
Done:
PER_UNUSE(self);
return r;
{
if (cmp == 0)
{
COPY_VALUE_TO_OBJECT(r, self->values[i]);
}
else
PyErr_SetObject(PyExc_KeyError, keyarg);
}
Done:
PER_UNUSE(self);
return r;
}
static PyObject *
bucket_getitem(Bucket *self, PyObject *key)
{
return _bucket_get(self, key, 0);
return _bucket_get(self, key, 0);
}
/*
......@@ -126,47 +127,53 @@ bucket_getitem(Bucket *self, PyObject *key)
static int
Bucket_grow(Bucket *self, int newsize, int noval)
{
KEY_TYPE *keys;
VALUE_TYPE *values;
if (self->size) {
if (newsize < 0)
newsize = self->size * 2;
if (newsize < 0) /* int overflow */
goto Overflow;
UNLESS (keys = BTree_Realloc(self->keys, sizeof(KEY_TYPE) * newsize))
return -1;
UNLESS (noval) {
values = BTree_Realloc(self->values, sizeof(VALUE_TYPE) * newsize);
if (values == NULL) {
free(keys);
return -1;
}
self->values = values;
KEY_TYPE *keys;
VALUE_TYPE *values;
if (self->size)
{
if (newsize < 0)
newsize = self->size * 2;
if (newsize < 0) /* int overflow */
goto Overflow;
UNLESS (keys = BTree_Realloc(self->keys, sizeof(KEY_TYPE) * newsize))
return -1;
UNLESS (noval)
{
values = BTree_Realloc(self->values, sizeof(VALUE_TYPE) * newsize);
if (values == NULL)
{
free(keys);
return -1;
}
self->values = values;
}
self->keys = keys;
}
self->keys = keys;
}
else {
if (newsize < 0)
newsize = MIN_BUCKET_ALLOC;
UNLESS (self->keys = BTree_Malloc(sizeof(KEY_TYPE) * newsize))
return -1;
UNLESS (noval) {
self->values = BTree_Malloc(sizeof(VALUE_TYPE) * newsize);
if (self->values == NULL) {
free(self->keys);
self->keys = NULL;
return -1;
}
else
{
if (newsize < 0)
newsize = MIN_BUCKET_ALLOC;
UNLESS (self->keys = BTree_Malloc(sizeof(KEY_TYPE) * newsize))
return -1;
UNLESS (noval)
{
self->values = BTree_Malloc(sizeof(VALUE_TYPE) * newsize);
if (self->values == NULL)
{
free(self->keys);
self->keys = NULL;
return -1;
}
}
}
}
self->size = newsize;
return 0;
self->size = newsize;
return 0;
Overflow:
PyErr_NoMemory();
return -1;
Overflow:
PyErr_NoMemory();
return -1;
}
/* So far, bucket_append is called only by multiunion_m(), so is called
......@@ -213,54 +220,59 @@ static int
bucket_append(Bucket *self, Bucket *from, int i, int n,
int copyValues, int overallocate)
{
int newlen;
assert(self && from && self != from);
assert(i >= 0);
assert(n > 0);
assert(i+n <= from->len);
/* Make room. */
newlen = self->len + n;
if (newlen > self->size) {
int newsize = newlen;
if (overallocate) /* boost by 25% -- pretty arbitrary */
newsize += newsize >> 2;
if (Bucket_grow(self, newsize, ! copyValues) < 0)
return -1;
}
assert(newlen <= self->size);
/* Copy stuff. */
memcpy(self->keys + self->len, from->keys + i, n * sizeof(KEY_TYPE));
if (copyValues) {
assert(self->values);
assert(from->values);
memcpy(self->values + self->len, from->values + i,
n * sizeof(VALUE_TYPE));
}
self->len = newlen;
int newlen;
assert(self && from && self != from);
assert(i >= 0);
assert(n > 0);
assert(i+n <= from->len);
/* Make room. */
newlen = self->len + n;
if (newlen > self->size)
{
int newsize = newlen;
if (overallocate) /* boost by 25% -- pretty arbitrary */
newsize += newsize >> 2;
if (Bucket_grow(self, newsize, ! copyValues) < 0)
return -1;
}
assert(newlen <= self->size);
/* Copy stuff. */
memcpy(self->keys + self->len, from->keys + i, n * sizeof(KEY_TYPE));
if (copyValues)
{
assert(self->values);
assert(from->values);
memcpy(self->values + self->len, from->values + i,
n * sizeof(VALUE_TYPE));
}
self->len = newlen;
/* Bump refcounts. */
/* Bump refcounts. */
#ifdef KEY_TYPE_IS_PYOBJECT
{
int j;
PyObject **p = from->keys + i;
for (j = 0; j < n; ++j, ++p) {
Py_INCREF(*p);
{
int j;
PyObject **p = from->keys + i;
for (j = 0; j < n; ++j, ++p)
{
Py_INCREF(*p);
}
}
}
#endif
#ifdef VALUE_TYPE_IS_PYOBJECT
if (copyValues) {
int j;
PyObject **p = from->values + i;
for (j = 0; j < n; ++j, ++p) {
Py_INCREF(*p);
if (copyValues)
{
int j;
PyObject **p = from->values + i;
for (j = 0; j < n; ++j, ++p)
{
Py_INCREF(*p);
}
}
}
#endif
return 0;
return 0;
}
#endif /* MULTI_INT_UNION */
......@@ -291,136 +303,149 @@ static int
_bucket_set(Bucket *self, PyObject *keyarg, PyObject *v,
int unique, int noval, int *changed)
{
int i, cmp;
KEY_TYPE key;
/* Subtle: there may or may not be a value. If there is, we need to
* check its type early, so that in case of error we can get out before
* mutating the bucket. But because value isn't used on all paths, if
* we don't initialize value then gcc gives a nuisance complaint that
* value may be used uninitialized (it can't be, but gcc doesn't know
* that). So we initialize it. However, VALUE_TYPE can be various types,
* including int, PyObject*, and char[6], so it's a puzzle to spell
* initialization. It so happens that {0} is a valid initializer for all
* these types.
*/
VALUE_TYPE value = {0}; /* squash nuisance warning */
int result = -1; /* until proven innocent */
int copied = 1;
COPY_KEY_FROM_ARG(key, keyarg, copied);
UNLESS(copied) return -1;
/* Copy the value early (if needed), so that in case of error a
* pile of bucket mutations don't need to be undone.
*/
if (v && !noval) {
COPY_VALUE_FROM_ARG(value, v, copied);
UNLESS(copied) return -1;
}
int i, cmp;
KEY_TYPE key;
/* Subtle: there may or may not be a value. If there is, we need to
* check its type early, so that in case of error we can get out before
* mutating the bucket. But because value isn't used on all paths, if
* we don't initialize value then gcc gives a nuisance complaint that
* value may be used uninitialized (it can't be, but gcc doesn't know
* that). So we initialize it. However, VALUE_TYPE can be various types,
* including int, PyObject*, and char[6], so it's a puzzle to spell
* initialization. It so happens that {0} is a valid initializer for all
* these types.
*/
VALUE_TYPE value = {0}; /* squash nuisance warning */
int result = -1; /* until proven innocent */
int copied = 1;
UNLESS (PER_USE(self)) return -1;
COPY_KEY_FROM_ARG(key, keyarg, copied);
UNLESS(copied)
return -1;
BUCKET_SEARCH(i, cmp, self, key, goto Done);
if (cmp == 0) {
/* The key exists, at index i. */
/* Copy the value early (if needed), so that in case of error a
* pile of bucket mutations don't need to be undone.
*/
if (v && !noval) {
COPY_VALUE_FROM_ARG(value, v, copied);
UNLESS(copied)
return -1;
}
if (v) {
/* The key exists at index i, and there's a new value.
* If unique, we're not supposed to replace it. If noval, or this
* is a set bucket (self->values is NULL), there's nothing to do.
*/
if (unique || noval || self->values == NULL) {
result = 0;
goto Done;
}
UNLESS (PER_USE(self))
return -1;
/* The key exists at index i, and we need to replace the value. */
BUCKET_SEARCH(i, cmp, self, key, goto Done);
if (cmp == 0)
{
/* The key exists, at index i. */
if (v)
{
/* The key exists at index i, and there's a new value.
* If unique, we're not supposed to replace it. If noval, or this
* is a set bucket (self->values is NULL), there's nothing to do.
*/
if (unique || noval || self->values == NULL)
{
result = 0;
goto Done;
}
/* The key exists at index i, and we need to replace the value. */
#ifdef VALUE_SAME
/* short-circuit if no change */
if (VALUE_SAME(self->values[i], value)) {
result = 0;
goto Done;
}
/* short-circuit if no change */
if (VALUE_SAME(self->values[i], value))
{
result = 0;
goto Done;
}
#endif
if (changed)
*changed = 1;
DECREF_VALUE(self->values[i]);
COPY_VALUE(self->values[i], value);
INCREF_VALUE(self->values[i]);
if (PER_CHANGED(self) >= 0)
result = 0;
goto Done;
}
if (changed)
*changed = 1;
DECREF_VALUE(self->values[i]);
COPY_VALUE(self->values[i], value);
INCREF_VALUE(self->values[i]);
if (PER_CHANGED(self) >= 0)
result = 0;
goto Done;
}
/* The key exists at index i, and should be deleted. */
DECREF_KEY(self->keys[i]);
self->len--;
if (i < self->len)
memmove(self->keys + i, self->keys + i+1,
sizeof(KEY_TYPE)*(self->len - i));
/* The key exists at index i, and should be deleted. */
DECREF_KEY(self->keys[i]);
self->len--;
if (i < self->len)
memmove(self->keys + i, self->keys + i+1,
sizeof(KEY_TYPE)*(self->len - i));
if (self->values) {
DECREF_VALUE(self->values[i]);
if (i < self->len)
memmove(self->values + i, self->values + i+1,
sizeof(VALUE_TYPE)*(self->len - i));
}
if (self->values)
{
DECREF_VALUE(self->values[i]);
if (i < self->len)
memmove(self->values + i, self->values + i+1,
sizeof(VALUE_TYPE)*(self->len - i));
}
if (! self->len) {
self->size = 0;
free(self->keys);
self->keys = NULL;
if (self->values) {
free(self->values);
self->values = NULL;
}
}
if (! self->len)
{
self->size = 0;
free(self->keys);
self->keys = NULL;
if (self->values)
{
free(self->values);
self->values = NULL;
}
}
if (changed)
*changed = 1;
if (PER_CHANGED(self) >= 0)
result = 1;
goto Done;
}
if (changed)
*changed = 1;
if (PER_CHANGED(self) >= 0)
result = 1;
goto Done;
}
/* The key doesn't exist, and belongs at index i. */
if (!v) {
/* Can't delete a non-existent key. */
PyErr_SetObject(PyExc_KeyError, keyarg);
goto Done;
}
/* The key doesn't exist, and belongs at index i. */
if (!v)
{
/* Can't delete a non-existent key. */
PyErr_SetObject(PyExc_KeyError, keyarg);
goto Done;
}
/* The key doesn't exist and should be inserted at index i. */
if (self->len == self->size && Bucket_grow(self, -1, noval) < 0)
goto Done;
/* The key doesn't exist and should be inserted at index i. */
if (self->len == self->size && Bucket_grow(self, -1, noval) < 0)
goto Done;
if (self->len > i) {
memmove(self->keys + i + 1, self->keys + i,
sizeof(KEY_TYPE) * (self->len - i));
if (self->values) {
memmove(self->values + i + 1, self->values + i,
sizeof(VALUE_TYPE) * (self->len - i));
if (self->len > i)
{
memmove(self->keys + i + 1, self->keys + i,
sizeof(KEY_TYPE) * (self->len - i));
if (self->values)
{
memmove(self->values + i + 1, self->values + i,
sizeof(VALUE_TYPE) * (self->len - i));
}
}
}
COPY_KEY(self->keys[i], key);
INCREF_KEY(self->keys[i]);
COPY_KEY(self->keys[i], key);
INCREF_KEY(self->keys[i]);
if (! noval) {
COPY_VALUE(self->values[i], value);
INCREF_VALUE(self->values[i]);
}
if (! noval)
{
COPY_VALUE(self->values[i], value);
INCREF_VALUE(self->values[i]);
}
self->len++;
if (changed)
*changed = 1;
if (PER_CHANGED(self) >= 0)
result = 1;
self->len++;
if (changed)
*changed = 1;
if (PER_CHANGED(self) >= 0)
result = 1;
Done:
PER_UNUSE(self);
return result;
Done:
PER_UNUSE(self);
return result;
}
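The "subtle" comment above leans on the fact that {0} is a valid initializer for every flavor of VALUE_TYPE a module might use. A tiny standalone check of that claim (illustrative declarations only):

/* {0} zero-initializes scalars, pointers and aggregates alike, which is
 * why it works no matter how VALUE_TYPE is spelled for a given module. */
int    as_int    = {0};   /* plain integer value types    */
double as_double = {0};   /* float value types            */
void  *as_ptr    = {0};   /* object (PyObject *) values   */
char   as_buf[6] = {0};   /* fixed-size byte string values */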
/*
......@@ -428,19 +453,19 @@ _bucket_set(Bucket *self, PyObject *keyarg, PyObject *v,
**
** wrapper for _bucket_set (eliminates +1 return code)
**
** Arguments: self The bucket
** key The key to insert under
** v The value to insert
** Arguments: self The bucket
** key The key to insert under
** v The value to insert
**
** Returns 0 on success
** -1 on failure
** Returns 0 on success
** -1 on failure
*/
static int
bucket_setitem(Bucket *self, PyObject *key, PyObject *v)
{
if (_bucket_set(self, key, v, 0, 0, 0) < 0)
return -1;
return 0;
if (_bucket_set(self, key, v, 0, 0, 0) < 0)
return -1;
return 0;
}
/**
......@@ -450,71 +475,82 @@ bucket_setitem(Bucket *self, PyObject *key, PyObject *v)
static int
update_from_seq(PyObject *map, PyObject *seq)
{
PyObject *iter, *o, *k, *v;
int err = -1;
/* One path creates a new seq object. The other path has an
INCREF of the seq argument. So seq must always be DECREFed on
the way out.
*/
/* Use items() if it's not a sequence. Alas, PySequence_Check()
* returns true for a PersistentMapping or PersistentDict, and we
* want to use items() in those cases too.
*/
if (!PySequence_Check(seq) || /* or it "looks like a dict" */
PyObject_HasAttrString(seq, "iteritems")) {
PyObject *items;
items = PyObject_GetAttrString(seq, "items");
if (items == NULL)
return -1;
seq = PyObject_CallObject(items, NULL);
Py_DECREF(items);
if (seq == NULL)
return -1;
}
else
Py_INCREF(seq);
iter = PyObject_GetIter(seq);
if (iter == NULL)
goto err;
while (1) {
o = PyIter_Next(iter);
if (o == NULL) {
if (PyErr_Occurred())
goto err;
else
break;
}
if (!PyTuple_Check(o) || PyTuple_GET_SIZE(o) != 2) {
Py_DECREF(o);
PyErr_SetString(PyExc_TypeError,
"Sequence must contain 2-item tuples");
goto err;
PyObject *iter, *o, *k, *v;
int err = -1;
/* One path creates a new seq object. The other path has an
INCREF of the seq argument. So seq must always be DECREFed on
the way out.
*/
/* Use items() if it's not a sequence. Alas, PySequence_Check()
* returns true for a PersistentMapping or PersistentDict, and we
* want to use items() in those cases too.
*/
#ifdef PY3K
#define ITERITEMS "items"
#else
#define ITERITEMS "iteritems"
#endif
if (!PySequence_Check(seq) || /* or it "looks like a dict" */
PyObject_HasAttrString(seq, ITERITEMS))
#undef ITERITEMS
{
PyObject *items;
items = PyObject_GetAttrString(seq, "items");
if (items == NULL)
return -1;
seq = PyObject_CallObject(items, NULL);
Py_DECREF(items);
if (seq == NULL)
return -1;
}
k = PyTuple_GET_ITEM(o, 0);
v = PyTuple_GET_ITEM(o, 1);
if (PyObject_SetItem(map, k, v) < 0) {
Py_DECREF(o);
goto err;
else
Py_INCREF(seq);
iter = PyObject_GetIter(seq);
if (iter == NULL)
goto err;
while (1)
{
o = PyIter_Next(iter);
if (o == NULL)
{
if (PyErr_Occurred())
goto err;
else
break;
}
if (!PyTuple_Check(o) || PyTuple_GET_SIZE(o) != 2)
{
Py_DECREF(o);
PyErr_SetString(PyExc_TypeError,
"Sequence must contain 2-item tuples");
goto err;
}
k = PyTuple_GET_ITEM(o, 0);
v = PyTuple_GET_ITEM(o, 1);
if (PyObject_SetItem(map, k, v) < 0)
{
Py_DECREF(o);
goto err;
}
Py_DECREF(o);
}
Py_DECREF(o);
}
err = 0;
err:
Py_DECREF(iter);
Py_DECREF(seq);
return err;
err = 0;
err:
Py_DECREF(iter);
Py_DECREF(seq);
return err;
}
static PyObject *
Mapping_update(PyObject *self, PyObject *seq)
{
if (update_from_seq(self, seq) < 0)
return NULL;
Py_INCREF(Py_None);
return Py_None;
if (update_from_seq(self, seq) < 0)
return NULL;
Py_INCREF(Py_None);
return Py_None;
}
/*
......@@ -522,52 +558,52 @@ Mapping_update(PyObject *self, PyObject *seq)
**
** Splits one bucket into two
**
** Arguments: self The bucket
** index the index of the key to split at (O.O.B use midpoint)
** next the new bucket to split into
** Arguments: self The bucket
** index the index of the key to split at (O.O.B use midpoint)
** next the new bucket to split into
**
** Returns: 0 on success
** -1 on failure
** Returns: 0 on success
** -1 on failure
*/
static int
bucket_split(Bucket *self, int index, Bucket *next)
{
int next_size;
int next_size;
ASSERT(self->len > 1, "split of empty bucket", -1);
ASSERT(self->len > 1, "split of empty bucket", -1);
if (index < 0 || index >= self->len)
index = self->len / 2;
if (index < 0 || index >= self->len)
index = self->len / 2;
next_size = self->len - index;
next_size = self->len - index;
next->keys = BTree_Malloc(sizeof(KEY_TYPE) * next_size);
if (!next->keys)
return -1;
memcpy(next->keys, self->keys + index, sizeof(KEY_TYPE) * next_size);
if (self->values) {
next->values = BTree_Malloc(sizeof(VALUE_TYPE) * next_size);
if (!next->values) {
free(next->keys);
next->keys = NULL;
return -1;
next->keys = BTree_Malloc(sizeof(KEY_TYPE) * next_size);
if (!next->keys)
return -1;
memcpy(next->keys, self->keys + index, sizeof(KEY_TYPE) * next_size);
if (self->values) {
next->values = BTree_Malloc(sizeof(VALUE_TYPE) * next_size);
if (!next->values) {
free(next->keys);
next->keys = NULL;
return -1;
}
memcpy(next->values, self->values + index,
sizeof(VALUE_TYPE) * next_size);
}
memcpy(next->values, self->values + index,
sizeof(VALUE_TYPE) * next_size);
}
next->size = next_size;
next->len = next_size;
self->len = index;
next->size = next_size;
next->len = next_size;
self->len = index;
next->next = self->next;
next->next = self->next;
Py_INCREF(next);
self->next = next;
Py_INCREF(next);
self->next = next;
if (PER_CHANGED(self) < 0)
return -1;
if (PER_CHANGED(self) < 0)
return -1;
return 0;
return 0;
}
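A concrete trace of the split, with illustrative numbers:

/* Example: self->len == 6, index out of bounds -> index = 6 / 2 = 3
 *   before:  self.keys = {k0,k1,k2,k3,k4,k5},  self->next = B
 *   after:   self.keys = {k0,k1,k2}            (self->len = 3)
 *            next.keys = {k3,k4,k5}            (next->len = next->size = 3)
 *            self->next = next, next->next = B (next INCREF'd by self)
 * Values, if present, are copied alongside the keys in the same way. */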
/* Set self->next to self->next->next, i.e. unlink self's successor from
......@@ -580,31 +616,33 @@ bucket_split(Bucket *self, int index, Bucket *next)
static int
Bucket_deleteNextBucket(Bucket *self)
{
int result = -1; /* until proven innocent */
Bucket *successor;
PER_USE_OR_RETURN(self, -1);
successor = self->next;
if (successor) {
Bucket *next;
/* Before: self -> successor -> next
* After: self --------------> next
*/
UNLESS (PER_USE(successor)) goto Done;
next = successor->next;
PER_UNUSE(successor);
int result = -1; /* until proven innocent */
Bucket *successor;
Py_XINCREF(next); /* it may be NULL, of course */
self->next = next;
Py_DECREF(successor);
if (PER_CHANGED(self) < 0)
goto Done;
}
result = 0;
PER_USE_OR_RETURN(self, -1);
successor = self->next;
if (successor)
{
Bucket *next;
/* Before: self -> successor -> next
* After: self --------------> next
*/
UNLESS (PER_USE(successor))
goto Done;
next = successor->next;
PER_UNUSE(successor);
Py_XINCREF(next); /* it may be NULL, of course */
self->next = next;
Py_DECREF(successor);
if (PER_CHANGED(self) < 0)
goto Done;
}
result = 0;
Done:
PER_UNUSE(self);
return result;
Done:
PER_UNUSE(self);
return result;
}
/*
......@@ -647,157 +685,173 @@ static int
Bucket_findRangeEnd(Bucket *self, PyObject *keyarg, int low, int exclude_equal,
int *offset)
{
int i, cmp;
int result = -1; /* until proven innocent */
KEY_TYPE key;
int copied = 1;
COPY_KEY_FROM_ARG(key, keyarg, copied);
UNLESS (copied) return -1;
UNLESS (PER_USE(self)) return -1;
BUCKET_SEARCH(i, cmp, self, key, goto Done);
if (cmp == 0) {
/* exact match at index i */
if (exclude_equal) {
/* but we don't want an exact match */
if (low)
++i;
else
--i;
int i, cmp;
int result = -1; /* until proven innocent */
KEY_TYPE key;
int copied = 1;
COPY_KEY_FROM_ARG(key, keyarg, copied);
UNLESS (copied)
return -1;
UNLESS (PER_USE(self))
return -1;
BUCKET_SEARCH(i, cmp, self, key, goto Done);
if (cmp == 0) {
/* exact match at index i */
if (exclude_equal)
{
/* but we don't want an exact match */
if (low)
++i;
else
--i;
}
}
}
/* Else keys[i-1] < key < keys[i], picturing infinities at OOB indices,
* and i has the smallest item > key, which is correct for low.
*/
else if (! low)
/* i-1 has the largest item < key (unless i-1 is OOB) */
--i;
result = 0 <= i && i < self->len;
if (result)
*offset = i;
Done:
PER_UNUSE(self);
return result;
/* Else keys[i-1] < key < keys[i], picturing infinities at OOB indices,
* and i has the smallest item > key, which is correct for low.
*/
else if (! low)
/* i-1 has the largest item < key (unless i-1 is OOB) */
--i;
result = 0 <= i && i < self->len;
if (result)
*offset = i;
Done:
PER_UNUSE(self);
return result;
}
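A worked example of the return conventions, using illustrative keys:

/* Bucket keys: {10, 20, 30}
 *   findRangeEnd(key=20, low=1, exclude_equal=0) -> 1, *offset = 1  (20)
 *   findRangeEnd(key=20, low=1, exclude_equal=1) -> 1, *offset = 2  (30)
 *   findRangeEnd(key=15, low=1, exclude_equal=0) -> 1, *offset = 1  (20)
 *   findRangeEnd(key=15, low=0, exclude_equal=0) -> 1, *offset = 0  (10)
 *   findRangeEnd(key=35, low=1, exclude_equal=0) -> 0  (no key >= 35)
 *   findRangeEnd(key= 5, low=0, exclude_equal=0) -> 0  (no key <= 5)  */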
static PyObject *
Bucket_maxminKey(Bucket *self, PyObject *args, int min)
{
PyObject *key=0;
int rc, offset = 0;
int empty_bucket = 1;
PyObject *key=0;
int rc, offset = 0;
int empty_bucket = 1;
if (args && ! PyArg_ParseTuple(args, "|O", &key)) return NULL;
if (args && ! PyArg_ParseTuple(args, "|O", &key))
return NULL;
PER_USE_OR_RETURN(self, NULL);
PER_USE_OR_RETURN(self, NULL);
UNLESS (self->len) goto empty;
UNLESS (self->len)
goto empty;
/* Find the low range */
if (key)
/* Find the low range */
if (key)
{
if ((rc = Bucket_findRangeEnd(self, key, min, 0, &offset)) <= 0)
if ((rc = Bucket_findRangeEnd(self, key, min, 0, &offset)) <= 0)
{
if (rc < 0) return NULL;
empty_bucket = 0;
goto empty;
if (rc < 0)
return NULL;
empty_bucket = 0;
goto empty;
}
}
else if (min) offset = 0;
else offset = self->len -1;
else if (min)
offset = 0;
else
offset = self->len -1;
COPY_KEY_TO_OBJECT(key, self->keys[offset]);
PER_UNUSE(self);
COPY_KEY_TO_OBJECT(key, self->keys[offset]);
PER_UNUSE(self);
return key;
return key;
empty:
PyErr_SetString(PyExc_ValueError,
empty_bucket ? "empty bucket" :
"no key satisfies the conditions");
PER_UNUSE(self);
return NULL;
empty:
PyErr_SetString(PyExc_ValueError,
empty_bucket ? "empty bucket" :
"no key satisfies the conditions");
PER_UNUSE(self);
return NULL;
}
static PyObject *
Bucket_minKey(Bucket *self, PyObject *args)
{
return Bucket_maxminKey(self, args, 1);
return Bucket_maxminKey(self, args, 1);
}
static PyObject *
Bucket_maxKey(Bucket *self, PyObject *args)
{
return Bucket_maxminKey(self, args, 0);
return Bucket_maxminKey(self, args, 0);
}
static int
Bucket_rangeSearch(Bucket *self, PyObject *args, PyObject *kw,
int *low, int *high)
{
PyObject *min = Py_None;
PyObject *max = Py_None;
int excludemin = 0;
int excludemax = 0;
int rc;
if (args) {
if (! PyArg_ParseTupleAndKeywords(args, kw, "|OOii", search_keywords,
&min,
&max,
&excludemin,
&excludemax))
return -1;
}
PyObject *min = Py_None;
PyObject *max = Py_None;
int excludemin = 0;
int excludemax = 0;
int rc;
UNLESS (self->len) goto empty;
if (args)
{
if (! PyArg_ParseTupleAndKeywords(args, kw, "|OOii", search_keywords,
&min,
&max,
&excludemin,
&excludemax))
return -1;
}
/* Find the low range */
if (min != Py_None) {
rc = Bucket_findRangeEnd(self, min, 1, excludemin, low);
if (rc < 0)
return -1;
if (rc == 0)
goto empty;
}
else {
*low = 0;
if (excludemin) {
if (self->len < 2)
UNLESS (self->len)
goto empty;
++*low;
/* Find the low range */
if (min != Py_None)
{
rc = Bucket_findRangeEnd(self, min, 1, excludemin, low);
if (rc < 0)
return -1;
if (rc == 0)
goto empty;
}
else
{
*low = 0;
if (excludemin)
{
if (self->len < 2)
goto empty;
++*low;
}
}
}
/* Find the high range */
if (max != Py_None) {
rc = Bucket_findRangeEnd(self, max, 0, excludemax, high);
if (rc < 0)
return -1;
if (rc == 0)
goto empty;
}
else {
*high = self->len - 1;
if (excludemax) {
if (self->len < 2)
goto empty;
--*high;
/* Find the high range */
if (max != Py_None)
{
rc = Bucket_findRangeEnd(self, max, 0, excludemax, high);
if (rc < 0)
return -1;
if (rc == 0)
goto empty;
}
else
{
*high = self->len - 1;
if (excludemax)
{
if (self->len < 2)
goto empty;
--*high;
}
}
}
/* If min < max to begin with, it's quite possible that low > high now. */
if (*low <= *high)
return 0;
/* If min < max to begin with, it's quite possible that low > high now. */
if (*low <= *high)
return 0;
empty:
*low = 0;
*high = -1;
return 0;
empty:
*low = 0;
*high = -1;
return 0;
}
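And a worked example of the resulting low/high window, again with illustrative keys:

/* Bucket keys: {10, 20, 30, 40}
 *   min=15, max=35                          -> *low = 1, *high = 2   (20..30)
 *   min=20, max=40, excludemin, excludemax  -> *low = 2, *high = 2   (30 only)
 *   min=None, max=None                      -> *low = 0, *high = 3   (whole bucket)
 *   min=35, max=15                          -> *low = 0, *high = -1  (empty range) */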
/*
......@@ -805,39 +859,40 @@ Bucket_rangeSearch(Bucket *self, PyObject *args, PyObject *kw,
**
** Generate a list of all keys in the bucket
**
** Arguments: self The Bucket
** args (unused)
** Arguments: self The Bucket
** args (unused)
**
** Returns: list of bucket keys
** Returns: list of bucket keys
*/
static PyObject *
bucket_keys(Bucket *self, PyObject *args, PyObject *kw)
{
PyObject *r = NULL, *key;
int i, low, high;
PyObject *r = NULL, *key;
int i, low, high;
PER_USE_OR_RETURN(self, NULL);
PER_USE_OR_RETURN(self, NULL);
if (Bucket_rangeSearch(self, args, kw, &low, &high) < 0)
goto err;
if (Bucket_rangeSearch(self, args, kw, &low, &high) < 0)
goto err;
r = PyList_New(high-low+1);
if (r == NULL)
goto err;
r = PyList_New(high-low+1);
if (r == NULL)
goto err;
for (i=low; i <= high; i++) {
COPY_KEY_TO_OBJECT(key, self->keys[i]);
if (PyList_SetItem(r, i-low , key) < 0)
goto err;
}
for (i=low; i <= high; i++)
{
COPY_KEY_TO_OBJECT(key, self->keys[i]);
if (PyList_SetItem(r, i-low , key) < 0)
goto err;
}
PER_UNUSE(self);
return r;
PER_UNUSE(self);
return r;
err:
PER_UNUSE(self);
Py_XDECREF(r);
return NULL;
err:
PER_UNUSE(self);
Py_XDECREF(r);
return NULL;
}
/*
......@@ -845,37 +900,41 @@ bucket_keys(Bucket *self, PyObject *args, PyObject *kw)
**
** Generate a list of all values in the bucket
**
** Arguments: self The Bucket
** args (unused)
** Arguments: self The Bucket
** args (unused)
**
** Returns list of values
** Returns list of values
*/
static PyObject *
bucket_values(Bucket *self, PyObject *args, PyObject *kw)
{
PyObject *r=0, *v;
int i, low, high;
PyObject *r=0, *v;
int i, low, high;
PER_USE_OR_RETURN(self, NULL);
PER_USE_OR_RETURN(self, NULL);
if (Bucket_rangeSearch(self, args, kw, &low, &high) < 0) goto err;
if (Bucket_rangeSearch(self, args, kw, &low, &high) < 0)
goto err;
UNLESS (r=PyList_New(high-low+1)) goto err;
UNLESS (r=PyList_New(high-low+1))
goto err;
for (i=low; i <= high; i++)
for (i=low; i <= high; i++)
{
COPY_VALUE_TO_OBJECT(v, self->values[i]);
UNLESS (v) goto err;
if (PyList_SetItem(r, i-low, v) < 0) goto err;
COPY_VALUE_TO_OBJECT(v, self->values[i]);
UNLESS (v)
goto err;
if (PyList_SetItem(r, i-low, v) < 0)
goto err;
}
PER_UNUSE(self);
return r;
PER_UNUSE(self);
return r;
err:
PER_UNUSE(self);
Py_XDECREF(r);
return NULL;
err:
PER_UNUSE(self);
Py_XDECREF(r);
return NULL;
}
/*
......@@ -883,214 +942,238 @@ bucket_values(Bucket *self, PyObject *args, PyObject *kw)
**
** Returns a list of all items in a bucket
**
** Arguments: self The Bucket
** args (unused)
** Arguments: self The Bucket
** args (unused)
**
** Returns: list of all items in the bucket
** Returns: list of all items in the bucket
*/
static PyObject *
bucket_items(Bucket *self, PyObject *args, PyObject *kw)
{
PyObject *r=0, *o=0, *item=0;
int i, low, high;
PyObject *r=0, *o=0, *item=0;
int i, low, high;
PER_USE_OR_RETURN(self, NULL);
PER_USE_OR_RETURN(self, NULL);
if (Bucket_rangeSearch(self, args, kw, &low, &high) < 0) goto err;
if (Bucket_rangeSearch(self, args, kw, &low, &high) < 0)
goto err;
UNLESS (r=PyList_New(high-low+1)) goto err;
UNLESS (r=PyList_New(high-low+1))
goto err;
for (i=low; i <= high; i++)
for (i=low; i <= high; i++)
{
UNLESS (item = PyTuple_New(2)) goto err;
UNLESS (item = PyTuple_New(2))
goto err;
COPY_KEY_TO_OBJECT(o, self->keys[i]);
UNLESS (o) goto err;
PyTuple_SET_ITEM(item, 0, o);
COPY_KEY_TO_OBJECT(o, self->keys[i]);
UNLESS (o)
goto err;
PyTuple_SET_ITEM(item, 0, o);
COPY_VALUE_TO_OBJECT(o, self->values[i]);
UNLESS (o) goto err;
PyTuple_SET_ITEM(item, 1, o);
COPY_VALUE_TO_OBJECT(o, self->values[i]);
UNLESS (o)
goto err;
PyTuple_SET_ITEM(item, 1, o);
if (PyList_SetItem(r, i-low, item) < 0) goto err;
if (PyList_SetItem(r, i-low, item) < 0)
goto err;
item = 0;
item = 0;
}
PER_UNUSE(self);
return r;
PER_UNUSE(self);
return r;
err:
PER_UNUSE(self);
Py_XDECREF(r);
Py_XDECREF(item);
return NULL;
err:
PER_UNUSE(self);
Py_XDECREF(r);
Py_XDECREF(item);
return NULL;
}
static PyObject *
bucket_byValue(Bucket *self, PyObject *omin)
{
PyObject *r=0, *o=0, *item=0;
VALUE_TYPE min;
VALUE_TYPE v;
int i, l, copied=1;
PyObject *r=0, *o=0, *item=0;
VALUE_TYPE min;
VALUE_TYPE v;
int i, l, copied=1;
PER_USE_OR_RETURN(self, NULL);
PER_USE_OR_RETURN(self, NULL);
COPY_VALUE_FROM_ARG(min, omin, copied);
UNLESS(copied) return NULL;
COPY_VALUE_FROM_ARG(min, omin, copied);
UNLESS(copied)
return NULL;
for (i=0, l=0; i < self->len; i++)
if (TEST_VALUE(self->values[i], min) >= 0)
l++;
for (i=0, l=0; i < self->len; i++)
if (TEST_VALUE(self->values[i], min) >= 0)
l++;
UNLESS (r=PyList_New(l)) goto err;
UNLESS (r=PyList_New(l))
goto err;
for (i=0, l=0; i < self->len; i++)
for (i=0, l=0; i < self->len; i++)
{
if (TEST_VALUE(self->values[i], min) < 0) continue;
UNLESS (item = PyTuple_New(2)) goto err;
COPY_KEY_TO_OBJECT(o, self->keys[i]);
UNLESS (o) goto err;
PyTuple_SET_ITEM(item, 1, o);
COPY_VALUE(v, self->values[i]);
NORMALIZE_VALUE(v, min);
COPY_VALUE_TO_OBJECT(o, v);
DECREF_VALUE(v);
UNLESS (o) goto err;
PyTuple_SET_ITEM(item, 0, o);
if (TEST_VALUE(self->values[i], min) < 0)
continue;
UNLESS (item = PyTuple_New(2))
goto err;
COPY_KEY_TO_OBJECT(o, self->keys[i]);
UNLESS (o)
goto err;
PyTuple_SET_ITEM(item, 1, o);
COPY_VALUE(v, self->values[i]);
NORMALIZE_VALUE(v, min);
COPY_VALUE_TO_OBJECT(o, v);
DECREF_VALUE(v);
UNLESS (o)
goto err;
PyTuple_SET_ITEM(item, 0, o);
if (PyList_SetItem(r, l, item) < 0)
goto err;
l++;
item = 0;
}
if (PyList_SetItem(r, l, item) < 0) goto err;
l++;
item=PyObject_GetAttr(r,sort_str);
UNLESS (item)
goto err;
ASSIGN(item, PyObject_CallObject(item, NULL));
UNLESS (item)
goto err;
ASSIGN(item, PyObject_GetAttr(r, reverse_str));
UNLESS (item)
goto err;
ASSIGN(item, PyObject_CallObject(item, NULL));
UNLESS (item)
goto err;
Py_DECREF(item);
item = 0;
}
PER_UNUSE(self);
return r;
item=PyObject_GetAttr(r,sort_str);
UNLESS (item) goto err;
ASSIGN(item, PyObject_CallObject(item, NULL));
UNLESS (item) goto err;
ASSIGN(item, PyObject_GetAttr(r, reverse_str));
UNLESS (item) goto err;
ASSIGN(item, PyObject_CallObject(item, NULL));
UNLESS (item) goto err;
Py_DECREF(item);
PER_UNUSE(self);
return r;
err:
PER_UNUSE(self);
Py_XDECREF(r);
Py_XDECREF(item);
return NULL;
err:
PER_UNUSE(self);
Py_XDECREF(r);
Py_XDECREF(item);
return NULL;
}
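bucket_byValue builds (value, key) tuples for every entry whose value compares >= min, then sorts and reverses the list, so callers get pairs in descending value order. A short usage sketch, assuming the integer-valued IIBucket instantiation from BTrees.IIBTree:

from BTrees.IIBTree import IIBucket

b = IIBucket({10: 1, 20: 5, 30: 3})
b.byValue(2)   # [(5, 20), (3, 30)] -- (value, key) pairs with value >= 2,
               # reverse-sorted by value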
static int
_bucket_clear(Bucket *self)
{
const int len = self->len;
/* Don't declare i at this level. If neither keys nor values are
* PyObject*, i won't be referenced, and you'll get a nuisance compiler
* warning for declaring it here.
*/
self->len = self->size = 0;
if (self->next) {
Py_DECREF(self->next);
self->next = NULL;
}
const int len = self->len;
/* Don't declare i at this level. If neither keys nor values are
* PyObject*, i won't be referenced, and you'll get a nuisance compiler
* warning for declaring it here.
*/
self->len = self->size = 0;
if (self->next)
{
Py_DECREF(self->next);
self->next = NULL;
}
/* Silence compiler warning about unused variable len for the case
when neither key nor value is an object, i.e. II. */
(void)len;
/* Silence compiler warning about unused variable len for the case
when neither key nor value is an object, i.e. II. */
(void)len;
if (self->keys) {
if (self->keys)
{
#ifdef KEY_TYPE_IS_PYOBJECT
int i;
for (i = 0; i < len; ++i)
DECREF_KEY(self->keys[i]);
int i;
for (i = 0; i < len; ++i)
DECREF_KEY(self->keys[i]);
#endif
free(self->keys);
self->keys = NULL;
}
free(self->keys);
self->keys = NULL;
}
if (self->values) {
if (self->values)
{
#ifdef VALUE_TYPE_IS_PYOBJECT
int i;
for (i = 0; i < len; ++i)
DECREF_VALUE(self->values[i]);
int i;
for (i = 0; i < len; ++i)
DECREF_VALUE(self->values[i]);
#endif
free(self->values);
self->values = NULL;
}
return 0;
free(self->values);
self->values = NULL;
}
return 0;
}
#ifdef PERSISTENT
static PyObject *
bucket__p_deactivate(Bucket *self, PyObject *args, PyObject *keywords)
{
int ghostify = 1;
PyObject *force = NULL;
int ghostify = 1;
PyObject *force = NULL;
if (args && PyTuple_GET_SIZE(args) > 0) {
PyErr_SetString(PyExc_TypeError,
"_p_deactivate takes no positional arguments");
return NULL;
}
if (keywords) {
int size = PyDict_Size(keywords);
force = PyDict_GetItemString(keywords, "force");
if (force)
size--;
if (size) {
PyErr_SetString(PyExc_TypeError,
"_p_deactivate only accepts keyword arg force");
return NULL;
}
}
if (self->jar && self->oid) {
ghostify = self->state == cPersistent_UPTODATE_STATE;
if (!ghostify && force) {
if (PyObject_IsTrue(force))
ghostify = 1;
if (PyErr_Occurred())
if (args && PyTuple_GET_SIZE(args) > 0)
{
PyErr_SetString(PyExc_TypeError,
"_p_deactivate takes no positional arguments");
return NULL;
}
if (ghostify) {
if (_bucket_clear(self) < 0)
return NULL;
PER_GHOSTIFY(self);
if (keywords)
{
int size = PyDict_Size(keywords);
force = PyDict_GetItemString(keywords, "force");
if (force)
size--;
if (size) {
PyErr_SetString(PyExc_TypeError,
"_p_deactivate only accepts keyword arg force");
return NULL;
}
}
}
Py_INCREF(Py_None);
return Py_None;
if (self->jar && self->oid)
{
ghostify = self->state == cPersistent_UPTODATE_STATE;
if (!ghostify && force) {
if (PyObject_IsTrue(force))
ghostify = 1;
if (PyErr_Occurred())
return NULL;
}
if (ghostify) {
if (_bucket_clear(self) < 0)
return NULL;
PER_GHOSTIFY(self);
}
}
Py_INCREF(Py_None);
return Py_None;
}
#endif
static PyObject *
bucket_clear(Bucket *self, PyObject *args)
{
PER_USE_OR_RETURN(self, NULL);
PER_USE_OR_RETURN(self, NULL);
if (self->len) {
if (_bucket_clear(self) < 0)
return NULL;
if (PER_CHANGED(self) < 0)
goto err;
}
PER_UNUSE(self);
Py_INCREF(Py_None);
return Py_None;
if (self->len)
{
if (_bucket_clear(self) < 0)
return NULL;
if (PER_CHANGED(self) < 0)
goto err;
}
PER_UNUSE(self);
Py_INCREF(Py_None);
return Py_None;
err:
PER_UNUSE(self);
return NULL;
err:
PER_UNUSE(self);
return NULL;
}
/*
......@@ -1119,178 +1202,181 @@ bucket_clear(Bucket *self, PyObject *args)
static PyObject *
bucket_getstate(Bucket *self)
{
PyObject *o = NULL, *items = NULL, *state;
int i, len, l;
PER_USE_OR_RETURN(self, NULL);
PyObject *o = NULL, *items = NULL, *state;
int i, len, l;
len = self->len;
PER_USE_OR_RETURN(self, NULL);
if (self->values) { /* Bucket */
items = PyTuple_New(len * 2);
if (items == NULL)
goto err;
for (i = 0, l = 0; i < len; i++) {
COPY_KEY_TO_OBJECT(o, self->keys[i]);
if (o == NULL)
goto err;
PyTuple_SET_ITEM(items, l, o);
l++;
len = self->len;
COPY_VALUE_TO_OBJECT(o, self->values[i]);
if (o == NULL)
goto err;
PyTuple_SET_ITEM(items, l, o);
l++;
if (self->values) /* Bucket */
{
items = PyTuple_New(len * 2);
if (items == NULL)
goto err;
for (i = 0, l = 0; i < len; i++) {
COPY_KEY_TO_OBJECT(o, self->keys[i]);
if (o == NULL)
goto err;
PyTuple_SET_ITEM(items, l, o);
l++;
COPY_VALUE_TO_OBJECT(o, self->values[i]);
if (o == NULL)
goto err;
PyTuple_SET_ITEM(items, l, o);
l++;
}
}
} else { /* Set */
items = PyTuple_New(len);
if (items == NULL)
goto err;
for (i = 0; i < len; i++) {
COPY_KEY_TO_OBJECT(o, self->keys[i]);
if (o == NULL)
goto err;
PyTuple_SET_ITEM(items, i, o);
else /* Set */
{
items = PyTuple_New(len);
if (items == NULL)
goto err;
for (i = 0; i < len; i++) {
COPY_KEY_TO_OBJECT(o, self->keys[i]);
if (o == NULL)
goto err;
PyTuple_SET_ITEM(items, i, o);
}
}
}
if (self->next)
state = Py_BuildValue("OO", items, self->next);
else
state = Py_BuildValue("(O)", items);
Py_DECREF(items);
if (self->next)
state = Py_BuildValue("OO", items, self->next);
else
state = Py_BuildValue("(O)", items);
Py_DECREF(items);
PER_UNUSE(self);
return state;
PER_UNUSE(self);
return state;
err:
PER_UNUSE(self);
Py_XDECREF(items);
return NULL;
err:
PER_UNUSE(self);
Py_XDECREF(items);
return NULL;
}
static int
_bucket_setstate(Bucket *self, PyObject *state)
{
PyObject *k, *v, *items;
Bucket *next = NULL;
int i, l, len, copied=1;
KEY_TYPE *keys;
VALUE_TYPE *values;
PyObject *k, *v, *items;
Bucket *next = NULL;
int i, l, len, copied=1;
KEY_TYPE *keys;
VALUE_TYPE *values;
if (!PyArg_ParseTuple(state, "O|O:__setstate__", &items, &next))
return -1;
if (!PyArg_ParseTuple(state, "O|O:__setstate__", &items, &next))
return -1;
if (!PyTuple_Check(items)) {
PyErr_SetString(PyExc_TypeError,
"tuple required for first state element");
return -1;
}
if (!PyTuple_Check(items)) {
PyErr_SetString(PyExc_TypeError,
"tuple required for first state element");
return -1;
}
len = PyTuple_Size(items);
if (len < 0)
return -1;
len /= 2;
len = PyTuple_Size(items);
if (len < 0)
return -1;
len /= 2;
for (i = self->len; --i >= 0; ) {
DECREF_KEY(self->keys[i]);
DECREF_VALUE(self->values[i]);
}
self->len = 0;
for (i = self->len; --i >= 0; ) {
DECREF_KEY(self->keys[i]);
DECREF_VALUE(self->values[i]);
}
self->len = 0;
if (self->next) {
Py_DECREF(self->next);
self->next = NULL;
}
if (self->next) {
Py_DECREF(self->next);
self->next = NULL;
}
if (len > self->size) {
keys = BTree_Realloc(self->keys, sizeof(KEY_TYPE)*len);
if (keys == NULL)
return -1;
values = BTree_Realloc(self->values, sizeof(VALUE_TYPE)*len);
if (values == NULL)
return -1;
self->keys = keys;
self->values = values;
self->size = len;
}
if (len > self->size) {
keys = BTree_Realloc(self->keys, sizeof(KEY_TYPE)*len);
if (keys == NULL)
return -1;
values = BTree_Realloc(self->values, sizeof(VALUE_TYPE)*len);
if (values == NULL)
return -1;
self->keys = keys;
self->values = values;
self->size = len;
}
for (i=0, l=0; i < len; i++) {
k = PyTuple_GET_ITEM(items, l);
l++;
v = PyTuple_GET_ITEM(items, l);
l++;
COPY_KEY_FROM_ARG(self->keys[i], k, copied);
if (!copied)
return -1;
COPY_VALUE_FROM_ARG(self->values[i], v, copied);
if (!copied)
return -1;
INCREF_KEY(self->keys[i]);
INCREF_VALUE(self->values[i]);
}
for (i=0, l=0; i < len; i++) {
k = PyTuple_GET_ITEM(items, l);
l++;
v = PyTuple_GET_ITEM(items, l);
l++;
COPY_KEY_FROM_ARG(self->keys[i], k, copied);
if (!copied)
return -1;
COPY_VALUE_FROM_ARG(self->values[i], v, copied);
if (!copied)
return -1;
INCREF_KEY(self->keys[i]);
INCREF_VALUE(self->values[i]);
}
self->len = len;
self->len = len;
if (next) {
self->next = next;
Py_INCREF(next);
}
if (next) {
self->next = next;
Py_INCREF(next);
}
return 0;
return 0;
}
static PyObject *
bucket_setstate(Bucket *self, PyObject *state)
{
int r;
int r;
PER_PREVENT_DEACTIVATION(self);
r = _bucket_setstate(self, state);
PER_UNUSE(self);
PER_PREVENT_DEACTIVATION(self);
r = _bucket_setstate(self, state);
PER_UNUSE(self);
if (r < 0)
return NULL;
Py_INCREF(Py_None);
return Py_None;
if (r < 0)
return NULL;
Py_INCREF(Py_None);
return Py_None;
}
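bucket_getstate and _bucket_setstate fix the pickle format: a one- or two-element tuple whose first item is a flat tuple of alternating keys and values (keys only for set buckets) and whose optional second item is the next bucket in the chain. A sketch of what that looks like from Python, assuming the OO instantiations from BTrees.OOBTree:

from BTrees.OOBTree import OOBucket, OOSet

OOBucket({'a': 1, 'b': 2}).__getstate__()   # (('a', 1, 'b', 2),)
OOSet(['a', 'b']).__getstate__()            # (('a', 'b'),)

b = OOBucket()
b.__setstate__((('a', 1, 'b', 2),))         # rebuilds the same two items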
static PyObject *
bucket_has_key(Bucket *self, PyObject *key)
{
return _bucket_get(self, key, 1);
return _bucket_get(self, key, 1);
}
static PyObject *
bucket_setdefault(Bucket *self, PyObject *args)
{
PyObject *key;
PyObject *failobj; /* default */
PyObject *value; /* return value */
int dummy_changed; /* in order to call _bucket_set */
PyObject *key;
PyObject *failobj; /* default */
PyObject *value; /* return value */
int dummy_changed; /* in order to call _bucket_set */
if (! PyArg_UnpackTuple(args, "setdefault", 2, 2, &key, &failobj))
return NULL;
if (! PyArg_UnpackTuple(args, "setdefault", 2, 2, &key, &failobj))
return NULL;
value = _bucket_get(self, key, 0);
if (value != NULL)
return value;
value = _bucket_get(self, key, 0);
if (value != NULL)
return value;
/* The key isn't in the bucket. If that's not due to a KeyError exception,
* pass back the unexpected exception.
*/
if (! PyErr_ExceptionMatches(PyExc_KeyError))
return NULL;
PyErr_Clear();
/* Associate `key` with `failobj` in the bucket, and return `failobj`. */
value = failobj;
if (_bucket_set(self, key, failobj, 0, 0, &dummy_changed) < 0)
value = NULL;
Py_XINCREF(value);
return value;
/* The key isn't in the bucket. If that's not due to a KeyError exception,
* pass back the unexpected exception.
*/
if (! PyErr_ExceptionMatches(PyExc_KeyError))
return NULL;
PyErr_Clear();
/* Associate `key` with `failobj` in the bucket, and return `failobj`. */
value = failobj;
if (_bucket_set(self, key, failobj, 0, 0, &dummy_changed) < 0)
value = NULL;
Py_XINCREF(value);
return value;
}
......@@ -1301,43 +1387,43 @@ Bucket_length(Bucket *self);
static PyObject *
bucket_pop(Bucket *self, PyObject *args)
{
PyObject *key;
PyObject *failobj = NULL; /* default */
PyObject *value; /* return value */
int dummy_changed; /* in order to call _bucket_set */
PyObject *key;
PyObject *failobj = NULL; /* default */
PyObject *value; /* return value */
int dummy_changed; /* in order to call _bucket_set */
if (! PyArg_UnpackTuple(args, "pop", 1, 2, &key, &failobj))
return NULL;
if (! PyArg_UnpackTuple(args, "pop", 1, 2, &key, &failobj))
return NULL;
value = _bucket_get(self, key, 0);
if (value != NULL) {
/* Delete key and associated value. */
if (_bucket_set(self, key, NULL, 0, 0, &dummy_changed) < 0) {
Py_DECREF(value);
return NULL;
value = _bucket_get(self, key, 0);
if (value != NULL) {
/* Delete key and associated value. */
if (_bucket_set(self, key, NULL, 0, 0, &dummy_changed) < 0) {
Py_DECREF(value);
return NULL;
}
return value;
}
return value;
}
/* The key isn't in the bucket. If that's not due to a KeyError exception,
* pass back the unexpected exception.
*/
if (! PyErr_ExceptionMatches(PyExc_KeyError))
return NULL;
/* The key isn't in the bucket. If that's not due to a KeyError exception,
* pass back the unexpected exception.
*/
if (! PyErr_ExceptionMatches(PyExc_KeyError))
return NULL;
if (failobj != NULL) {
/* Clear the KeyError and return the explicit default. */
PyErr_Clear();
Py_INCREF(failobj);
return failobj;
}
if (failobj != NULL) {
/* Clear the KeyError and return the explicit default. */
PyErr_Clear();
Py_INCREF(failobj);
return failobj;
}
/* No default given. The only difference in this case is the error
* message, which depends on whether the bucket is empty.
*/
if (Bucket_length(self) == 0)
PyErr_SetString(PyExc_KeyError, "pop(): Bucket is empty");
return NULL;
/* No default given. The only difference in this case is the error
* message, which depends on whether the bucket is empty.
*/
if (Bucket_length(self) == 0)
PyErr_SetString(PyExc_KeyError, "pop(): Bucket is empty");
return NULL;
}
/* Search bucket self for key. This is the sq_contains slot of the
......@@ -1351,14 +1437,14 @@ bucket_pop(Bucket *self, PyObject *args)
static int
bucket_contains(Bucket *self, PyObject *key)
{
PyObject *asobj = _bucket_get(self, key, 1);
int result = -1;
PyObject *asobj = _bucket_get(self, key, 1);
int result = -1;
if (asobj != NULL) {
result = PyInt_AsLong(asobj) ? 1 : 0;
Py_DECREF(asobj);
}
return result;
if (asobj != NULL) {
result = INT_AS_LONG(asobj) ? 1 : 0;
Py_DECREF(asobj);
}
return result;
}
/*
......@@ -1368,18 +1454,18 @@ bucket_contains(Bucket *self, PyObject *key)
static PyObject *
bucket_getm(Bucket *self, PyObject *args)
{
PyObject *key, *d=Py_None, *r;
PyObject *key, *d=Py_None, *r;
if (!PyArg_ParseTuple(args, "O|O:get", &key, &d))
return NULL;
r = _bucket_get(self, key, 0);
if (r)
return r;
if (!PyErr_ExceptionMatches(PyExc_KeyError))
return NULL;
PyErr_Clear();
Py_INCREF(d);
return d;
if (!PyArg_ParseTuple(args, "O|O:get", &key, &d))
return NULL;
r = _bucket_get(self, key, 0);
if (r)
return r;
if (!PyErr_ExceptionMatches(PyExc_KeyError))
return NULL;
PyErr_Clear();
Py_INCREF(d);
return d;
}
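get, setdefault and pop layer the familiar dict API over _bucket_get/_bucket_set: a KeyError from the lookup is swallowed and replaced by the supplied default, and setdefault additionally stores that default. A short sketch, assuming OOBucket:

from BTrees.OOBTree import OOBucket

b = OOBucket({'a': 1})
b.get('a')                # 1
b.get('missing', 42)      # 42
b.setdefault('b', 2)      # 2, and 'b' -> 2 is now stored in the bucket
b.pop('a')                # 1, and 'a' is removed
b.pop('zzz', 'default')   # 'default'
# b.pop('zzz') with no default would raise KeyError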
/**************************************************************************/
......@@ -1394,52 +1480,53 @@ bucket_getm(Bucket *self, PyObject *args)
static PyObject *
buildBucketIter(Bucket *self, PyObject *args, PyObject *kw, char kind)
{
BTreeItems *items;
int lowoffset, highoffset;
BTreeIter *result = NULL;
BTreeItems *items;
int lowoffset, highoffset;
BTreeIter *result = NULL;
PER_USE_OR_RETURN(self, NULL);
if (Bucket_rangeSearch(self, args, kw, &lowoffset, &highoffset) < 0)
goto Done;
PER_USE_OR_RETURN(self, NULL);
if (Bucket_rangeSearch(self, args, kw, &lowoffset, &highoffset) < 0)
goto Done;
items = (BTreeItems *)newBTreeItems(kind, self, lowoffset,
self, highoffset);
if (items == NULL) goto Done;
items = (BTreeItems *)newBTreeItems(kind, self, lowoffset,
self, highoffset);
if (items == NULL)
goto Done;
result = BTreeIter_new(items); /* win or lose, we're done */
Py_DECREF(items);
result = BTreeIter_new(items); /* win or lose, we're done */
Py_DECREF(items);
Done:
PER_UNUSE(self);
return (PyObject *)result;
Done:
PER_UNUSE(self);
return (PyObject *)result;
}
/* The implementation of iter(Bucket_or_Set); the Bucket tp_iter slot. */
static PyObject *
Bucket_getiter(Bucket *self)
{
return buildBucketIter(self, NULL, NULL, 'k');
return buildBucketIter(self, NULL, NULL, 'k');
}
/* The implementation of Bucket.iterkeys(). */
static PyObject *
Bucket_iterkeys(Bucket *self, PyObject *args, PyObject *kw)
{
return buildBucketIter(self, args, kw, 'k');
return buildBucketIter(self, args, kw, 'k');
}
/* The implementation of Bucket.itervalues(). */
static PyObject *
Bucket_itervalues(Bucket *self, PyObject *args, PyObject *kw)
{
return buildBucketIter(self, args, kw, 'v');
return buildBucketIter(self, args, kw, 'v');
}
/* The implementation of Bucket.iteritems(). */
static PyObject *
Bucket_iteritems(Bucket *self, PyObject *args, PyObject *kw)
{
return buildBucketIter(self, args, kw, 'i');
return buildBucketIter(self, args, kw, 'i');
}
/* End of iterator support. */
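buildBucketIter reuses the same range search to build a BTreeItems slice, so the iterator variants accept the same min/max/excludemin/excludemax keywords. A brief sketch, assuming OOBucket:

from BTrees.OOBTree import OOBucket

b = OOBucket({'a': 1, 'b': 2, 'c': 3})
list(b)                      # ['a', 'b', 'c'] -- iter(bucket) yields keys
list(b.iterkeys(min='b'))    # ['b', 'c']
list(b.itervalues())         # [1, 2, 3]
list(b.iteritems(max='b'))   # [('a', 1), ('b', 2)]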
......@@ -1451,61 +1538,61 @@ static PyObject *bucket_merge(Bucket *s1, Bucket *s2, Bucket *s3);
static PyObject *
_bucket__p_resolveConflict(PyObject *ob_type, PyObject *s[3])
{
PyObject *result = NULL; /* guilty until proved innocent */
Bucket *b[3] = {NULL, NULL, NULL};
PyObject *meth = NULL;
PyObject *a = NULL;
int i;
for (i = 0; i < 3; i++) {
PyObject *r;
b[i] = (Bucket*)PyObject_CallObject((PyObject *)ob_type, NULL);
if (b[i] == NULL)
goto Done;
if (s[i] == Py_None) /* None is equivalent to empty, for BTrees */
continue;
meth = PyObject_GetAttr((PyObject *)b[i], __setstate___str);
if (meth == NULL)
goto Done;
a = PyTuple_New(1);
if (a == NULL)
goto Done;
PyTuple_SET_ITEM(a, 0, s[i]);
Py_INCREF(s[i]);
r = PyObject_CallObject(meth, a); /* b[i].__setstate__(s[i]) */
if (r == NULL)
goto Done;
Py_DECREF(r);
Py_DECREF(a);
Py_DECREF(meth);
a = meth = NULL;
}
PyObject *result = NULL; /* guilty until proved innocent */
Bucket *b[3] = {NULL, NULL, NULL};
PyObject *meth = NULL;
PyObject *a = NULL;
int i;
for (i = 0; i < 3; i++) {
PyObject *r;
b[i] = (Bucket*)PyObject_CallObject((PyObject *)ob_type, NULL);
if (b[i] == NULL)
goto Done;
if (s[i] == Py_None) /* None is equivalent to empty, for BTrees */
continue;
meth = PyObject_GetAttr((PyObject *)b[i], __setstate___str);
if (meth == NULL)
goto Done;
a = PyTuple_New(1);
if (a == NULL)
goto Done;
PyTuple_SET_ITEM(a, 0, s[i]);
Py_INCREF(s[i]);
r = PyObject_CallObject(meth, a); /* b[i].__setstate__(s[i]) */
if (r == NULL)
goto Done;
Py_DECREF(r);
Py_DECREF(a);
Py_DECREF(meth);
a = meth = NULL;
}
if (b[0]->next != b[1]->next || b[0]->next != b[2]->next)
merge_error(-1, -1, -1, 0);
else
result = bucket_merge(b[0], b[1], b[2]);
if (b[0]->next != b[1]->next || b[0]->next != b[2]->next)
merge_error(-1, -1, -1, 0);
else
result = bucket_merge(b[0], b[1], b[2]);
Done:
Py_XDECREF(meth);
Py_XDECREF(a);
Py_XDECREF(b[0]);
Py_XDECREF(b[1]);
Py_XDECREF(b[2]);
Done:
Py_XDECREF(meth);
Py_XDECREF(a);
Py_XDECREF(b[0]);
Py_XDECREF(b[1]);
Py_XDECREF(b[2]);
return result;
return result;
}
static PyObject *
bucket__p_resolveConflict(Bucket *self, PyObject *args)
{
PyObject *s[3];
PyObject *s[3];
if (!PyArg_ParseTuple(args, "OOO", &s[0], &s[1], &s[2]))
return NULL;
if (!PyArg_ParseTuple(args, "OOO", &s[0], &s[1], &s[2]))
return NULL;
return _bucket__p_resolveConflict((PyObject *)self->ob_type, s);
return _bucket__p_resolveConflict((PyObject *)Py_TYPE(self), s);
}
#endif
......@@ -1518,113 +1605,116 @@ bucket__p_resolveConflict(Bucket *self, PyObject *args)
*/
static struct PyMemberDef Bucket_members[] = {
{"_next", T_OBJECT, offsetof(Bucket, next)},
{NULL}
{"_next", T_OBJECT, offsetof(Bucket, next)},
{NULL}
};
static struct PyMethodDef Bucket_methods[] = {
{"__getstate__", (PyCFunction) bucket_getstate, METH_NOARGS,
"__getstate__() -- Return the picklable state of the object"},
{"__setstate__", (PyCFunction) bucket_setstate, METH_O,
"__setstate__() -- Set the state of the object"},
{"__getstate__", (PyCFunction) bucket_getstate, METH_NOARGS,
"__getstate__() -- Return the picklable state of the object"},
{"keys", (PyCFunction) bucket_keys, METH_KEYWORDS,
"keys([min, max]) -- Return the keys"},
{"__setstate__", (PyCFunction) bucket_setstate, METH_O,
"__setstate__() -- Set the state of the object"},
{"has_key", (PyCFunction) bucket_has_key, METH_O,
"has_key(key) -- Test whether the bucket contains the given key"},
{"keys", (PyCFunction) bucket_keys, METH_VARARGS | METH_KEYWORDS,
"keys([min, max]) -- Return the keys"},
{"clear", (PyCFunction) bucket_clear, METH_VARARGS,
"clear() -- Remove all of the items from the bucket"},
{"has_key", (PyCFunction) bucket_has_key, METH_O,
"has_key(key) -- Test whether the bucket contains the given key"},
{"update", (PyCFunction) Mapping_update, METH_O,
"update(collection) -- Add the items from the given collection"},
{"clear", (PyCFunction) bucket_clear, METH_VARARGS,
"clear() -- Remove all of the items from the bucket"},
{"maxKey", (PyCFunction) Bucket_maxKey, METH_VARARGS,
"maxKey([key]) -- Find the maximum key\n\n"
"If an argument is given, find the maximum <= the argument"},
{"update", (PyCFunction) Mapping_update, METH_O,
"update(collection) -- Add the items from the given collection"},
{"minKey", (PyCFunction) Bucket_minKey, METH_VARARGS,
"minKey([key]) -- Find the minimum key\n\n"
"If an argument is given, find the minimum >= the argument"},
{"maxKey", (PyCFunction) Bucket_maxKey, METH_VARARGS,
"maxKey([key]) -- Find the maximum key\n\n"
"If an argument is given, find the maximum <= the argument"},
{"values", (PyCFunction) bucket_values, METH_KEYWORDS,
"values([min, max]) -- Return the values"},
{"minKey", (PyCFunction) Bucket_minKey, METH_VARARGS,
"minKey([key]) -- Find the minimum key\n\n"
"If an argument is given, find the minimum >= the argument"},
{"items", (PyCFunction) bucket_items, METH_KEYWORDS,
"items([min, max])) -- Return the items"},
{"values", (PyCFunction) bucket_values, METH_VARARGS | METH_KEYWORDS,
"values([min, max]) -- Return the values"},
{"byValue", (PyCFunction) bucket_byValue, METH_O,
"byValue(min) -- "
"Return value-keys with values >= min and reverse sorted by values"},
{"items", (PyCFunction) bucket_items, METH_VARARGS | METH_KEYWORDS,
"items([min, max])) -- Return the items"},
{"get", (PyCFunction) bucket_getm, METH_VARARGS,
"get(key[,default]) -- Look up a value\n\n"
"Return the default (or None) if the key is not found."},
{"byValue", (PyCFunction) bucket_byValue, METH_O,
"byValue(min) -- "
"Return value-keys with values >= min and reverse sorted by values"},
{"setdefault", (PyCFunction) bucket_setdefault, METH_VARARGS,
"D.setdefault(k, d) -> D.get(k, d), also set D[k]=d if k not in D.\n\n"
"Return the value like get() except that if key is missing, d is both\n"
"returned and inserted into the bucket as the value of k."},
{"get", (PyCFunction) bucket_getm, METH_VARARGS,
"get(key[,default]) -- Look up a value\n\n"
"Return the default (or None) if the key is not found."},
{"pop", (PyCFunction) bucket_pop, METH_VARARGS,
"D.pop(k[, d]) -> v, remove key and return the corresponding value.\n\n"
"If key is not found, d is returned if given, otherwise KeyError\n"
"is raised."},
{"setdefault", (PyCFunction) bucket_setdefault, METH_VARARGS,
"D.setdefault(k, d) -> D.get(k, d), also set D[k]=d if k not in D.\n\n"
"Return the value like get() except that if key is missing, d is both\n"
"returned and inserted into the bucket as the value of k."},
{"iterkeys", (PyCFunction) Bucket_iterkeys, METH_KEYWORDS,
"B.iterkeys([min[,max]]) -> an iterator over the keys of B"},
{"pop", (PyCFunction) bucket_pop, METH_VARARGS,
"D.pop(k[, d]) -> v, remove key and return the corresponding value.\n\n"
"If key is not found, d is returned if given, otherwise KeyError\n"
"is raised."},
{"itervalues", (PyCFunction) Bucket_itervalues, METH_KEYWORDS,
"B.itervalues([min[,max]]) -> an iterator over the values of B"},
{"iterkeys", (PyCFunction) Bucket_iterkeys, METH_VARARGS | METH_KEYWORDS,
"B.iterkeys([min[,max]]) -> an iterator over the keys of B"},
{"itervalues",
(PyCFunction) Bucket_itervalues, METH_VARARGS | METH_KEYWORDS,
"B.itervalues([min[,max]]) -> an iterator over the values of B"},
{"iteritems", (PyCFunction) Bucket_iteritems, METH_KEYWORDS,
"B.iteritems([min[,max]]) -> an iterator over the (key, value) items of B"},
{"iteritems", (PyCFunction) Bucket_iteritems, METH_VARARGS | METH_KEYWORDS,
"B.iteritems([min[,max]]) -> an iterator over the (key, value) "
"items of B"},
#ifdef EXTRA_BUCKET_METHODS
EXTRA_BUCKET_METHODS
EXTRA_BUCKET_METHODS
#endif
#ifdef PERSISTENT
{"_p_resolveConflict", (PyCFunction) bucket__p_resolveConflict,
METH_VARARGS,
"_p_resolveConflict() -- Reinitialize from a newly created copy"},
{"_p_resolveConflict",
(PyCFunction) bucket__p_resolveConflict, METH_VARARGS,
"_p_resolveConflict() -- Reinitialize from a newly created copy"},
{"_p_deactivate", (PyCFunction) bucket__p_deactivate, METH_KEYWORDS,
"_p_deactivate() -- Reinitialize from a newly created copy"},
{"_p_deactivate",
(PyCFunction) bucket__p_deactivate, METH_VARARGS | METH_KEYWORDS,
"_p_deactivate() -- Reinitialize from a newly created copy"},
#endif
{NULL, NULL}
{NULL, NULL}
};
static int
Bucket_init(PyObject *self, PyObject *args, PyObject *kwds)
{
PyObject *v = NULL;
PyObject *v = NULL;
if (!PyArg_ParseTuple(args, "|O:" MOD_NAME_PREFIX "Bucket", &v))
return -1;
if (!PyArg_ParseTuple(args, "|O:" MOD_NAME_PREFIX "Bucket", &v))
return -1;
if (v)
return update_from_seq(self, v);
else
return 0;
if (v)
return update_from_seq(self, v);
else
return 0;
}
static void
bucket_dealloc(Bucket *self)
{
if (self->state != cPersistent_GHOST_STATE)
_bucket_clear(self);
if (self->state != cPersistent_GHOST_STATE)
_bucket_clear(self);
cPersistenceCAPI->pertype->tp_dealloc((PyObject *)self);
cPersistenceCAPI->pertype->tp_dealloc((PyObject *)self);
}
static int
bucket_traverse(Bucket *self, visitproc visit, void *arg)
{
int err = 0;
int i, len;
int err = 0;
int i, len;
#define VISIT(SLOT) \
if (SLOT) { \
......@@ -1633,42 +1723,44 @@ bucket_traverse(Bucket *self, visitproc visit, void *arg)
goto Done; \
}
/* Call our base type's traverse function. Because buckets are
* subclasses of Persistent, there must be one.
*/
err = cPersistenceCAPI->pertype->tp_traverse((PyObject *)self, visit, arg);
if (err)
goto Done;
/* If this is registered with the persistence system, cleaning up cycles
* is the database's problem. It would be horrid to unghostify buckets
* here just to chase pointers every time gc runs.
*/
if (self->state == cPersistent_GHOST_STATE)
goto Done;
len = self->len;
(void)i; /* if neither keys nor values are PyObject*, "i" is otherwise
unreferenced and we get a nuisance compiler warning */
/* Call our base type's traverse function. Because buckets are
* subclasses of Persistent, there must be one.
*/
err = cPersistenceCAPI->pertype->tp_traverse((PyObject *)self, visit, arg);
if (err)
goto Done;
/* If this is registered with the persistence system, cleaning up cycles
* is the database's problem. It would be horrid to unghostify buckets
* here just to chase pointers every time gc runs.
*/
if (self->state == cPersistent_GHOST_STATE)
goto Done;
len = self->len;
/* if neither keys nor values are PyObject*, "i" is otherwise
unreferenced and we get a nuisance compiler warning */
(void)i;
(void)len;
#ifdef KEY_TYPE_IS_PYOBJECT
/* Keys are Python objects so need to be traversed. */
for (i = 0; i < len; i++)
VISIT(self->keys[i]);
/* Keys are Python objects so need to be traversed. */
for (i = 0; i < len; i++)
VISIT(self->keys[i]);
#endif
#ifdef VALUE_TYPE_IS_PYOBJECT
if (self->values != NULL) {
/* self->values exists (this is a mapping bucket, not a set bucket),
* and are Python objects, so need to be traversed. */
for (i = 0; i < len; i++)
VISIT(self->values[i]);
}
if (self->values != NULL) {
/* self->values exists (this is a mapping bucket, not a set bucket),
* and are Python objects, so need to be traversed. */
for (i = 0; i < len; i++)
VISIT(self->values[i]);
}
#endif
VISIT(self->next);
VISIT(self->next);
Done:
return err;
Done:
return err;
#undef VISIT
}
......@@ -1676,139 +1768,169 @@ bucket_traverse(Bucket *self, visitproc visit, void *arg)
static int
bucket_tp_clear(Bucket *self)
{
if (self->state != cPersistent_GHOST_STATE)
_bucket_clear(self);
return 0;
if (self->state != cPersistent_GHOST_STATE)
_bucket_clear(self);
return 0;
}
/* Code to access Bucket objects as mappings */
static int
Bucket_length( Bucket *self)
{
int r;
UNLESS (PER_USE(self)) return -1;
r = self->len;
PER_UNUSE(self);
return r;
int r;
UNLESS (PER_USE(self))
return -1;
r = self->len;
PER_UNUSE(self);
return r;
}
static PyMappingMethods Bucket_as_mapping = {
(lenfunc)Bucket_length, /*mp_length*/
(binaryfunc)bucket_getitem, /*mp_subscript*/
(objobjargproc)bucket_setitem, /*mp_ass_subscript*/
(lenfunc)Bucket_length, /*mp_length*/
(binaryfunc)bucket_getitem, /*mp_subscript*/
(objobjargproc)bucket_setitem, /*mp_ass_subscript*/
};
static PySequenceMethods Bucket_as_sequence = {
(lenfunc)0, /* sq_length */
(binaryfunc)0, /* sq_concat */
(ssizeargfunc)0, /* sq_repeat */
(ssizeargfunc)0, /* sq_item */
(ssizessizeargfunc)0, /* sq_slice */
(ssizeobjargproc)0, /* sq_ass_item */
(ssizessizeobjargproc)0, /* sq_ass_slice */
(objobjproc)bucket_contains, /* sq_contains */
0, /* sq_inplace_concat */
0, /* sq_inplace_repeat */
(lenfunc)0, /* sq_length */
(binaryfunc)0, /* sq_concat */
(ssizeargfunc)0, /* sq_repeat */
(ssizeargfunc)0, /* sq_item */
(ssizessizeargfunc)0, /* sq_slice */
(ssizeobjargproc)0, /* sq_ass_item */
(ssizessizeobjargproc)0, /* sq_ass_slice */
(objobjproc)bucket_contains, /* sq_contains */
0, /* sq_inplace_concat */
0, /* sq_inplace_repeat */
};
static PyObject *
bucket_repr(Bucket *self)
{
PyObject *i, *r;
char repr[10000];
int rv;
PyObject *i, *r;
#ifdef PY3K
PyObject *rb;
#endif
char repr[10000];
int rv;
i = bucket_items(self, NULL, NULL);
if (!i)
return NULL;
r = PyObject_Repr(i);
Py_DECREF(i);
if (!r) {
return NULL;
}
rv = PyOS_snprintf(repr, sizeof(repr),
"%s(%s)", self->ob_type->tp_name,
PyString_AS_STRING(r));
if (rv > 0 && rv < sizeof(repr)) {
Py_DECREF(r);
return PyString_FromStringAndSize(repr, strlen(repr));
}
else {
/* The static buffer wasn't big enough */
int size;
PyObject *s;
/* 3 for the parens and the null byte */
size = strlen(self->ob_type->tp_name) + PyString_GET_SIZE(r) + 3;
s = PyString_FromStringAndSize(NULL, size);
if (!s) {
Py_DECREF(r);
return r;
i = bucket_items(self, NULL, NULL);
if (!i)
{
return NULL;
}
r = PyObject_Repr(i);
Py_DECREF(i);
if (!r)
{
return NULL;
}
#ifdef PY3K
rb = PyUnicode_AsLatin1String(r);
rv = PyOS_snprintf(repr, sizeof(repr),
"%s(%s)", Py_TYPE(self)->tp_name,
PyBytes_AsString(rb));
Py_DECREF(rb);
#else
rv = PyOS_snprintf(repr, sizeof(repr),
"%s(%s)", Py_TYPE(self)->tp_name,
PyBytes_AS_STRING(r));
#endif
if (rv > 0 && rv < sizeof(repr))
{
Py_DECREF(r);
#ifdef PY3K
return PyUnicode_DecodeLatin1(repr, sizeof(repr), "surrogateescape");
#else
return PyBytes_FromStringAndSize(repr, sizeof(repr));
#endif
}
else
{
/* The static buffer wasn't big enough */
int size;
PyObject *s;
#ifdef PY3K
PyObject *result;
#endif
/* 3 for the parens and the null byte */
size = strlen(Py_TYPE(self)->tp_name) + PyBytes_GET_SIZE(r) + 3;
s = PyBytes_FromStringAndSize(NULL, size);
if (!s) {
Py_DECREF(r);
return r;
}
PyOS_snprintf(PyBytes_AS_STRING(s), size,
"%s(%s)", Py_TYPE(self)->tp_name, PyBytes_AS_STRING(r));
Py_DECREF(r);
#ifdef PY3K
result = PyUnicode_FromEncodedObject(s, "latin1", "surrogateescape");
Py_DECREF(s);
return result;
#else
return s;
#endif
}
PyOS_snprintf(PyString_AS_STRING(s), size,
"%s(%s)", self->ob_type->tp_name, PyString_AS_STRING(r));
Py_DECREF(r);
return s;
}
}
static PyTypeObject BucketType = {
PyObject_HEAD_INIT(NULL) /* PyPersist_Type */
0, /* ob_size */
MODULE_NAME MOD_NAME_PREFIX "Bucket",/* tp_name */
sizeof(Bucket), /* tp_basicsize */
0, /* tp_itemsize */
(destructor)bucket_dealloc, /* tp_dealloc */
0, /* tp_print */
0, /* tp_getattr */
0, /* tp_setattr */
0, /* tp_compare */
(reprfunc)bucket_repr, /* tp_repr */
0, /* tp_as_number */
&Bucket_as_sequence, /* tp_as_sequence */
&Bucket_as_mapping, /* tp_as_mapping */
0, /* tp_hash */
0, /* tp_call */
0, /* tp_str */
0, /* tp_getattro */
0, /* tp_setattro */
0, /* tp_as_buffer */
Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC |
Py_TPFLAGS_BASETYPE, /* tp_flags */
0, /* tp_doc */
(traverseproc)bucket_traverse, /* tp_traverse */
(inquiry)bucket_tp_clear, /* tp_clear */
0, /* tp_richcompare */
0, /* tp_weaklistoffset */
(getiterfunc)Bucket_getiter, /* tp_iter */
0, /* tp_iternext */
Bucket_methods, /* tp_methods */
Bucket_members, /* tp_members */
0, /* tp_getset */
0, /* tp_base */
0, /* tp_dict */
0, /* tp_descr_get */
0, /* tp_descr_set */
0, /* tp_dictoffset */
Bucket_init, /* tp_init */
0, /* tp_alloc */
0, /*PyType_GenericNew,*/ /* tp_new */
PyVarObject_HEAD_INIT(NULL, 0)
MODULE_NAME MOD_NAME_PREFIX "Bucket", /* tp_name */
sizeof(Bucket), /* tp_basicsize */
0, /* tp_itemsize */
(destructor)bucket_dealloc, /* tp_dealloc */
0, /* tp_print */
0, /* tp_getattr */
0, /* tp_setattr */
0, /* tp_compare */
(reprfunc)bucket_repr, /* tp_repr */
0, /* tp_as_number */
&Bucket_as_sequence, /* tp_as_sequence */
&Bucket_as_mapping, /* tp_as_mapping */
0, /* tp_hash */
0, /* tp_call */
0, /* tp_str */
0, /* tp_getattro */
0, /* tp_setattro */
0, /* tp_as_buffer */
Py_TPFLAGS_DEFAULT |
Py_TPFLAGS_HAVE_GC |
Py_TPFLAGS_BASETYPE, /* tp_flags */
0, /* tp_doc */
(traverseproc)bucket_traverse, /* tp_traverse */
(inquiry)bucket_tp_clear, /* tp_clear */
0, /* tp_richcompare */
0, /* tp_weaklistoffset */
(getiterfunc)Bucket_getiter, /* tp_iter */
0, /* tp_iternext */
Bucket_methods, /* tp_methods */
Bucket_members, /* tp_members */
0, /* tp_getset */
0, /* tp_base */
0, /* tp_dict */
0, /* tp_descr_get */
0, /* tp_descr_set */
0, /* tp_dictoffset */
Bucket_init, /* tp_init */
0, /* tp_alloc */
0, /*PyType_GenericNew,*/ /* tp_new */
};
static int
nextBucket(SetIteration *i)
{
if (i->position >= 0)
if (i->position >= 0)
{
UNLESS(PER_USE(BUCKET(i->set))) return -1;
UNLESS(PER_USE(BUCKET(i->set)))
return -1;
if (i->position)
if (i->position)
{
DECREF_KEY(i->key);
DECREF_VALUE(i->value);
}
if (i->position < BUCKET(i->set)->len)
if (i->position < BUCKET(i->set)->len)
{
COPY_KEY(i->key, BUCKET(i->set)->keys[i->position]);
INCREF_KEY(i->key);
......@@ -1816,15 +1938,14 @@ nextBucket(SetIteration *i)
INCREF_VALUE(i->value);
i->position ++;
}
else
else
{
i->position = -1;
PER_ACCESSED(BUCKET(i->set));
}
PER_ALLOW_DEACTIVATION(BUCKET(i->set));
PER_ALLOW_DEACTIVATION(BUCKET(i->set));
}
return 0;
return 0;
}
......@@ -99,17 +99,8 @@ weightedUnionPy = _set_operation(_weightedUnion, IFSetPy)
weightedIntersectionPy = _set_operation(_weightedIntersection, IFSetPy)
try:
from _IFBTree import IFBucket
from _IFBTree import IFSet
from _IFBTree import IFBTree
from _IFBTree import IFTreeSet
from _IFBTree import difference
from _IFBTree import union
from _IFBTree import intersection
from _IFBTree import multiunion
from _OIBTree import weightedUnion
from _OIBTree import weightedIntersection
except ImportError: #pragma NO COVER
from ._IFBTree import IFBucket
except ImportError: #pragma NO COVER w/ C extensions
IFBucket = IFBucketPy
IFSet = IFSetPy
IFBTree = IFBTreePy
......@@ -120,6 +111,16 @@ except ImportError: #pragma NO COVER
multiunion = multiunionPy
weightedUnion = weightedUnionPy
weightedIntersection = weightedIntersectionPy
else: #pragma NO COVER w/o C extensions
from ._IFBTree import IFSet
from ._IFBTree import IFBTree
from ._IFBTree import IFTreeSet
from ._IFBTree import difference
from ._IFBTree import union
from ._IFBTree import intersection
from ._IFBTree import multiunion
from ._IFBTree import weightedUnion
from ._IFBTree import weightedIntersection
Bucket = IFBucket
Set = IFSet
......
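The import blocks are reshaped so that only one name is probed inside the try: if the C extension is absent, the except branch rebinds every public name to the pure-Python classes; otherwise the else branch pulls in the remaining C names, and the coverage pragmas mark whichever branch cannot run in the current build. The same try/except/else shape, sketched with a standard-library analogue so it runs on either Python line (module names here are only an analogy, not part of this change):

try:
    from cPickle import dumps     # probe for the accelerated module once
except ImportError:               # no C-level module: fall back for every name
    from pickle import dumps, loads
else:                             # C-level module present: import the rest
    from cPickle import loads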
......@@ -100,17 +100,8 @@ weightedUnionPy = _set_operation(_weightedUnion, IISetPy)
weightedIntersectionPy = _set_operation(_weightedIntersection, IISetPy)
try:
from _IIBTree import IIBucket
from _IIBTree import IISet
from _IIBTree import IIBTree
from _IIBTree import IITreeSet
from _IIBTree import difference
from _IIBTree import union
from _IIBTree import intersection
from _IIBTree import multiunion
from _IIBTree import weightedUnion
from _IIBTree import weightedIntersection
except ImportError: #pragma NO COVER
from ._IIBTree import IIBucket
except ImportError: #pragma NO COVER w/ C extensions
IIBucket = IIBucketPy
IISet = IISetPy
IIBTree = IIBTreePy
......@@ -121,6 +112,16 @@ except ImportError: #pragma NO COVER
multiunion = multiunionPy
weightedUnion = weightedUnionPy
weightedIntersection = weightedIntersectionPy
else: #pragma NO COVER w/o C extensions
from ._IIBTree import IISet
from ._IIBTree import IIBTree
from ._IIBTree import IITreeSet
from ._IIBTree import difference
from ._IIBTree import union
from ._IIBTree import intersection
from ._IIBTree import multiunion
from ._IIBTree import weightedUnion
from ._IIBTree import weightedIntersection
Bucket = IIBucket
Set = IISet
......
......@@ -83,15 +83,8 @@ intersectionPy = _set_operation(_intersection, IOSetPy)
multiunionPy = _set_operation(_multiunion, IOSetPy)
try:
from _IOBTree import IOBucket
from _IOBTree import IOSet
from _IOBTree import IOBTree
from _IOBTree import IOTreeSet
from _IOBTree import difference
from _IOBTree import union
from _IOBTree import intersection
from _IOBTree import multiunion
except ImportError: #pragma NO COVER
from ._IOBTree import IOBucket
except ImportError: #pragma NO COVER w/ C extensions
IOBucket = IOBucketPy
IOSet = IOSetPy
IOBTree = IOBTreePy
......@@ -100,6 +93,14 @@ except ImportError: #pragma NO COVER
union = unionPy
intersection = intersectionPy
multiunion = multiunionPy
else: #pragma NO COVER w/o C extensions
from ._IOBTree import IOSet
from ._IOBTree import IOBTree
from ._IOBTree import IOTreeSet
from ._IOBTree import difference
from ._IOBTree import union
from ._IOBTree import intersection
from ._IOBTree import multiunion
Bucket = IOBucket
Set = IOSet
......
......@@ -100,17 +100,8 @@ weightedUnionPy = _set_operation(_weightedUnion, LFSetPy)
weightedIntersectionPy = _set_operation(_weightedIntersection, LFSetPy)
try:
from _LFBTree import LFBucket
from _LFBTree import LFSet
from _LFBTree import LFBTree
from _LFBTree import LFTreeSet
from _LFBTree import difference
from _LFBTree import union
from _LFBTree import intersection
from _LFBTree import multiunion
from _OIBTree import weightedUnion
from _OIBTree import weightedIntersection
except ImportError: #pragma NO COVER
from ._LFBTree import LFBucket
except ImportError: #pragma NO COVER w/ C extensions
LFBucket = LFBucketPy
LFSet = LFSetPy
LFBTree = LFBTreePy
......@@ -121,6 +112,16 @@ except ImportError: #pragma NO COVER
multiunion = multiunionPy
weightedUnion = weightedUnionPy
weightedIntersection = weightedIntersectionPy
else: #pragma NO COVER w/o C extensions
from ._LFBTree import LFSet
from ._LFBTree import LFBTree
from ._LFBTree import LFTreeSet
from ._LFBTree import difference
from ._LFBTree import union
from ._LFBTree import intersection
from ._LFBTree import multiunion
from ._LFBTree import weightedUnion
from ._LFBTree import weightedIntersection
Bucket = LFBucket
Set = LFSet
......
......@@ -100,17 +100,8 @@ weightedUnionPy = _set_operation(_weightedUnion, LLSetPy)
weightedIntersectionPy = _set_operation(_weightedIntersection, LLSetPy)
try:
from _LLBTree import LLBucket
from _LLBTree import LLSet
from _LLBTree import LLBTree
from _LLBTree import LLTreeSet
from _LLBTree import difference
from _LLBTree import union
from _LLBTree import intersection
from _LLBTree import multiunion
from _LLBTree import weightedUnion
from _LLBTree import weightedIntersection
except ImportError: #pragma NO COVER
from ._LLBTree import LLBucket
except ImportError: #pragma NO COVER w/ C extensions
LLBucket = LLBucketPy
LLSet = LLSetPy
LLBTree = LLBTreePy
......@@ -121,6 +112,16 @@ except ImportError: #pragma NO COVER
multiunion = multiunionPy
weightedUnion = weightedUnionPy
weightedIntersection = weightedIntersectionPy
else: #pragma NO COVER w/o C extensions
from ._LLBTree import LLSet
from ._LLBTree import LLBTree
from ._LLBTree import LLTreeSet
from ._LLBTree import difference
from ._LLBTree import union
from ._LLBTree import intersection
from ._LLBTree import multiunion
from ._LLBTree import weightedUnion
from ._LLBTree import weightedIntersection
Bucket = LLBucket
Set = LLSet
......
......@@ -83,15 +83,8 @@ intersectionPy = _set_operation(_intersection, LOSetPy)
multiunionPy = _set_operation(_multiunion, LOSetPy)
try:
from _LOBTree import LOBucket
from _LOBTree import LOSet
from _LOBTree import LOBTree
from _LOBTree import LOTreeSet
from _LOBTree import difference
from _LOBTree import union
from _LOBTree import intersection
from _LOBTree import multiunion
except ImportError: #pragma NO COVER
from ._LOBTree import LOBucket
except ImportError: #pragma NO COVER w/ C extensions
LOBucket = LOBucketPy
LOSet = LOSetPy
LOBTree = LOBTreePy
......@@ -100,6 +93,14 @@ except ImportError: #pragma NO COVER
union = unionPy
intersection = intersectionPy
multiunion = multiunionPy
else: #pragma NO COVER w/o C extensions
from ._LOBTree import LOSet
from ._LOBTree import LOBTree
from ._LOBTree import LOTreeSet
from ._LOBTree import difference
from ._LOBTree import union
from ._LOBTree import intersection
from ._LOBTree import multiunion
Bucket = LOBucket
Set = LOSet
......
......@@ -97,16 +97,8 @@ weightedUnionPy = _set_operation(_weightedUnion, OISetPy)
weightedIntersectionPy = _set_operation(_weightedIntersection, OISetPy)
try:
from _OIBTree import OIBucket
from _OIBTree import OISet
from _OIBTree import OIBTree
from _OIBTree import OITreeSet
from _OIBTree import difference
from _OIBTree import union
from _OIBTree import intersection
from _OIBTree import weightedUnion
from _OIBTree import weightedIntersection
except ImportError: #pragma NO COVER
from ._OIBTree import OIBucket
except ImportError: #pragma NO COVER w/ C extensions
OIBucket = OIBucketPy
OISet = OISetPy
OIBTree = OIBTreePy
......@@ -116,6 +108,15 @@ except ImportError: #pragma NO COVER
intersection = intersectionPy
weightedUnion = weightedUnionPy
weightedIntersection = weightedIntersectionPy
else: #pragma NO COVER w/o C extensions
from ._OIBTree import OISet
from ._OIBTree import OIBTree
from ._OIBTree import OITreeSet
from ._OIBTree import difference
from ._OIBTree import union
from ._OIBTree import intersection
from ._OIBTree import weightedUnion
from ._OIBTree import weightedIntersection
Bucket = OIBucket
......
......@@ -98,16 +98,8 @@ weightedUnionPy = _set_operation(_weightedUnion, OLSetPy)
weightedIntersectionPy = _set_operation(_weightedIntersection, OLSetPy)
try:
from _OLBTree import OLBucket
from _OLBTree import OLSet
from _OLBTree import OLBTree
from _OLBTree import OLTreeSet
from _OLBTree import difference
from _OLBTree import union
from _OLBTree import intersection
from _OLBTree import weightedUnion
from _OLBTree import weightedIntersection
except ImportError: #pragma NO COVER
from ._OLBTree import OLBucket
except ImportError: #pragma NO COVER w/ C extensions
OLBucket = OLBucketPy
OLSet = OLSetPy
OLBTree = OLBTreePy
......@@ -117,6 +109,15 @@ except ImportError: #pragma NO COVER
intersection = intersectionPy
weightedUnion = weightedUnionPy
weightedIntersection = weightedIntersectionPy
else: #pragma NO COVER w/o C extensions
from ._OLBTree import OLSet
from ._OLBTree import OLBTree
from ._OLBTree import OLTreeSet
from ._OLBTree import difference
from ._OLBTree import union
from ._OLBTree import intersection
from ._OLBTree import weightedUnion
from ._OLBTree import weightedIntersection
Bucket = OLBucket
Set = OLSet
......
......@@ -78,14 +78,8 @@ unionPy = _set_operation(_union, OOSetPy)
intersectionPy = _set_operation(_intersection, OOSetPy)
try:
from _OOBTree import OOBucket
from _OOBTree import OOSet
from _OOBTree import OOBTree
from _OOBTree import OOTreeSet
from _OOBTree import difference
from _OOBTree import union
from _OOBTree import intersection
except ImportError: #pragma NO COVER
from ._OOBTree import OOBucket
except ImportError as e: #pragma NO COVER w/ C extensions
OOBucket = OOBucketPy
OOSet = OOSetPy
OOBTree = OOBTreePy
......@@ -93,6 +87,13 @@ except ImportError: #pragma NO COVER
difference = differencePy
union = unionPy
intersection = intersectionPy
else: #pragma NO COVER w/o C extensions
from ._OOBTree import OOSet
from ._OOBTree import OOBTree
from ._OOBTree import OOTreeSet
from ._OOBTree import difference
from ._OOBTree import union
from ._OOBTree import intersection
Bucket = OOBucket
Set = OOSet
......
......@@ -11,18 +11,21 @@
FOR A PARTICULAR PURPOSE
****************************************************************************/
#include "_compat.h"
#define SETTEMPLATE_C "$Id$\n"
static PyObject *
Set_insert(Bucket *self, PyObject *args)
{
PyObject *key;
int i;
UNLESS (PyArg_ParseTuple(args, "O", &key)) return NULL;
if ( (i=_bucket_set(self, key, Py_None, 1, 1, 0)) < 0) return NULL;
return PyInt_FromLong(i);
PyObject *key;
int i;
UNLESS (PyArg_ParseTuple(args, "O", &key))
return NULL;
if ( (i=_bucket_set(self, key, Py_None, 1, 1, 0)) < 0)
return NULL;
return INT_FROM_LONG(i);
}
/* _Set_update and _TreeSet_update are identical except for the
......@@ -37,28 +40,28 @@ _Set_update(Bucket *self, PyObject *seq)
iter = PyObject_GetIter(seq);
if (iter == NULL)
return -1;
return -1;
while (1) {
v = PyIter_Next(iter);
if (v == NULL) {
if (PyErr_Occurred())
goto err;
else
break;
}
ind = _bucket_set(self, v, Py_None, 1, 1, 0);
Py_DECREF(v);
if (ind < 0)
goto err;
else
n += ind;
v = PyIter_Next(iter);
if (v == NULL) {
if (PyErr_Occurred())
goto err;
else
break;
}
ind = _bucket_set(self, v, Py_None, 1, 1, 0);
Py_DECREF(v);
if (ind < 0)
goto err;
else
n += ind;
}
err:
err:
Py_DECREF(iter);
if (ind < 0)
return -1;
return -1;
return n;
}
......@@ -69,147 +72,156 @@ Set_update(Bucket *self, PyObject *args)
int n = 0;
if (!PyArg_ParseTuple(args, "|O:update", &seq))
return NULL;
return NULL;
if (seq) {
n = _Set_update(self, seq);
if (n < 0)
return NULL;
n = _Set_update(self, seq);
if (n < 0)
return NULL;
}
return PyInt_FromLong(n);
return INT_FROM_LONG(n);
}
static PyObject *
Set_remove(Bucket *self, PyObject *args)
{
PyObject *key;
PyObject *key;
UNLESS (PyArg_ParseTuple(args, "O", &key)) return NULL;
if (_bucket_set(self, key, NULL, 0, 1, 0) < 0) return NULL;
UNLESS (PyArg_ParseTuple(args, "O", &key))
return NULL;
if (_bucket_set(self, key, NULL, 0, 1, 0) < 0)
return NULL;
Py_INCREF(Py_None);
return Py_None;
Py_INCREF(Py_None);
return Py_None;
}
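Set_insert/add, Set_update and Set_remove expose the set flavour of the same bucket code: insert returns 1 when the key was added and 0 when it was already present, update returns the number of newly added keys, and remove raises KeyError for a missing key. A short sketch, assuming the IISet instantiation from BTrees.IIBTree:

from BTrees.IIBTree import IISet

s = IISet([1, 3])
s.insert(2)        # 1 -- newly added
s.insert(2)        # 0 -- already present
s.add(5)           # alias for insert
s.update([3, 7])   # 1 -- only 7 is new
s.remove(7)        # raises KeyError if the key is absent
2 in s             # True, via the sq_contains slot
s[0]               # 1 -- small sets also support indexing via sq_item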
static int
_set_setstate(Bucket *self, PyObject *args)
{
PyObject *k, *items;
Bucket *next=0;
int i, l, copied=1;
KEY_TYPE *keys;
UNLESS (PyArg_ParseTuple(args, "O|O", &items, &next))
return -1;
if (!PyTuple_Check(items)) {
PyErr_SetString(PyExc_TypeError,
"tuple required for first state element");
return -1;
}
PyObject *k, *items;
Bucket *next=0;
int i, l, copied=1;
KEY_TYPE *keys;
UNLESS (PyArg_ParseTuple(args, "O|O", &items, &next))
return -1;
if (!PyTuple_Check(items)) {
PyErr_SetString(PyExc_TypeError,
"tuple required for first state element");
return -1;
}
if ((l=PyTuple_Size(items)) < 0) return -1;
if ((l=PyTuple_Size(items)) < 0)
return -1;
for (i=self->len; --i >= 0; )
for (i=self->len; --i >= 0; )
{
DECREF_KEY(self->keys[i]);
DECREF_KEY(self->keys[i]);
}
self->len=0;
self->len=0;
if (self->next)
if (self->next)
{
Py_DECREF(self->next);
self->next=0;
Py_DECREF(self->next);
self->next=0;
}
if (l > self->size)
if (l > self->size)
{
UNLESS (keys=BTree_Realloc(self->keys, sizeof(KEY_TYPE)*l)) return -1;
self->keys=keys;
self->size=l;
UNLESS (keys=BTree_Realloc(self->keys, sizeof(KEY_TYPE)*l))
return -1;
self->keys=keys;
self->size=l;
}
for (i=0; i<l; i++)
for (i=0; i<l; i++)
{
k=PyTuple_GET_ITEM(items, i);
COPY_KEY_FROM_ARG(self->keys[i], k, copied);
UNLESS (copied) return -1;
INCREF_KEY(self->keys[i]);
k=PyTuple_GET_ITEM(items, i);
COPY_KEY_FROM_ARG(self->keys[i], k, copied);
UNLESS (copied)
return -1;
INCREF_KEY(self->keys[i]);
}
self->len=l;
self->len=l;
if (next)
if (next)
{
self->next=next;
Py_INCREF(next);
self->next=next;
Py_INCREF(next);
}
return 0;
return 0;
}
static PyObject *
set_setstate(Bucket *self, PyObject *args)
{
int r;
int r;
UNLESS (PyArg_ParseTuple(args, "O", &args)) return NULL;
UNLESS (PyArg_ParseTuple(args, "O", &args))
return NULL;
PER_PREVENT_DEACTIVATION(self);
r=_set_setstate(self, args);
PER_UNUSE(self);
PER_PREVENT_DEACTIVATION(self);
r=_set_setstate(self, args);
PER_UNUSE(self);
if (r < 0) return NULL;
Py_INCREF(Py_None);
return Py_None;
if (r < 0)
return NULL;
Py_INCREF(Py_None);
return Py_None;
}
static struct PyMethodDef Set_methods[] = {
{"__getstate__", (PyCFunction) bucket_getstate, METH_VARARGS,
"__getstate__() -- Return the picklable state of the object"},
{"__getstate__", (PyCFunction) bucket_getstate, METH_VARARGS,
"__getstate__() -- Return the picklable state of the object"},
{"__setstate__", (PyCFunction) set_setstate, METH_VARARGS,
"__setstate__() -- Set the state of the object"},
{"__setstate__", (PyCFunction) set_setstate, METH_VARARGS,
"__setstate__() -- Set the state of the object"},
{"keys", (PyCFunction) bucket_keys, METH_KEYWORDS,
{"keys", (PyCFunction) bucket_keys, METH_VARARGS | METH_KEYWORDS,
"keys() -- Return the keys"},
{"has_key", (PyCFunction) bucket_has_key, METH_O,
{"has_key", (PyCFunction) bucket_has_key, METH_O,
"has_key(key) -- Test whether the bucket contains the given key"},
{"clear", (PyCFunction) bucket_clear, METH_VARARGS,
"clear() -- Remove all of the items from the bucket"},
{"clear", (PyCFunction) bucket_clear, METH_VARARGS,
"clear() -- Remove all of the items from the bucket"},
{"maxKey", (PyCFunction) Bucket_maxKey, METH_VARARGS,
"maxKey([key]) -- Find the maximum key\n\n"
"If an argument is given, find the maximum <= the argument"},
{"maxKey", (PyCFunction) Bucket_maxKey, METH_VARARGS,
"maxKey([key]) -- Find the maximum key\n\n"
"If an argument is given, find the maximum <= the argument"},
{"minKey", (PyCFunction) Bucket_minKey, METH_VARARGS,
"minKey([key]) -- Find the minimum key\n\n"
"If an argument is given, find the minimum >= the argument"},
{"minKey", (PyCFunction) Bucket_minKey, METH_VARARGS,
"minKey([key]) -- Find the minimum key\n\n"
"If an argument is given, find the minimum >= the argument"},
#ifdef PERSISTENT
{"_p_resolveConflict", (PyCFunction) bucket__p_resolveConflict, METH_VARARGS,
"_p_resolveConflict() -- Reinitialize from a newly created copy"},
{"_p_resolveConflict",
(PyCFunction) bucket__p_resolveConflict, METH_VARARGS,
"_p_resolveConflict() -- Reinitialize from a newly created copy"},
{"_p_deactivate", (PyCFunction) bucket__p_deactivate, METH_KEYWORDS,
"_p_deactivate() -- Reinitialize from a newly created copy"},
{"_p_deactivate",
(PyCFunction) bucket__p_deactivate, METH_VARARGS | METH_KEYWORDS,
"_p_deactivate() -- Reinitialize from a newly created copy"},
#endif
{"add", (PyCFunction)Set_insert, METH_VARARGS,
"add(id) -- Add a key to the set"},
{"add", (PyCFunction)Set_insert, METH_VARARGS,
"add(id) -- Add a key to the set"},
{"insert", (PyCFunction)Set_insert, METH_VARARGS,
"insert(id) -- Add a key to the set"},
{"insert", (PyCFunction)Set_insert, METH_VARARGS,
"insert(id) -- Add a key to the set"},
{"update", (PyCFunction)Set_update, METH_VARARGS,
"update(seq) -- Add the items from the given sequence to the set"},
{"update", (PyCFunction)Set_update, METH_VARARGS,
"update(seq) -- Add the items from the given sequence to the set"},
{"remove", (PyCFunction)Set_remove, METH_VARARGS,
"remove(id) -- Remove an id from the set"},
{"remove", (PyCFunction)Set_remove, METH_VARARGS,
"remove(id) -- Remove an id from the set"},
{NULL, NULL} /* sentinel */
{NULL, NULL} /* sentinel */
};
static int
......@@ -218,12 +230,12 @@ Set_init(PyObject *self, PyObject *args, PyObject *kwds)
PyObject *v = NULL;
if (!PyArg_ParseTuple(args, "|O:" MOD_NAME_PREFIX "Set", &v))
return -1;
return -1;
if (v)
return _Set_update((Bucket *)self, v);
return _Set_update((Bucket *)self, v);
else
return 0;
return 0;
}
......@@ -231,128 +243,131 @@ Set_init(PyObject *self, PyObject *args, PyObject *kwds)
static PyObject *
set_repr(Bucket *self)
{
static PyObject *format;
PyObject *r, *t;
if (!format)
format = PyString_FromString(MOD_NAME_PREFIX "Set(%s)");
UNLESS (t = PyTuple_New(1)) return NULL;
UNLESS (r = bucket_keys(self, NULL, NULL)) goto err;
PyTuple_SET_ITEM(t, 0, r);
r = t;
ASSIGN(r, PyString_Format(format, r));
return r;
static PyObject *format;
PyObject *r, *t;
if (!format)
format = TEXT_FROM_STRING(MOD_NAME_PREFIX "Set(%s)");
UNLESS (t = PyTuple_New(1))
return NULL;
UNLESS (r = bucket_keys(self, NULL, NULL))
goto err;
PyTuple_SET_ITEM(t, 0, r);
r = t;
ASSIGN(r, TEXT_FORMAT(format, r));
return r;
err:
Py_DECREF(t);
return NULL;
Py_DECREF(t);
return NULL;
}
static Py_ssize_t
set_length(Bucket *self)
{
int r;
int r;
PER_USE_OR_RETURN(self, -1);
r = self->len;
PER_UNUSE(self);
PER_USE_OR_RETURN(self, -1);
r = self->len;
PER_UNUSE(self);
return r;
return r;
}
static PyObject *
set_item(Bucket *self, Py_ssize_t index)
{
PyObject *r=0;
PyObject *r=0;
PER_USE_OR_RETURN(self, NULL);
if (index >= 0 && index < self->len)
PER_USE_OR_RETURN(self, NULL);
if (index >= 0 && index < self->len)
{
COPY_KEY_TO_OBJECT(r, self->keys[index]);
COPY_KEY_TO_OBJECT(r, self->keys[index]);
}
else
IndexError(index);
else
IndexError(index);
PER_UNUSE(self);
PER_UNUSE(self);
return r;
return r;
}
static PySequenceMethods set_as_sequence = {
(lenfunc)set_length, /* sq_length */
(binaryfunc)0, /* sq_concat */
(ssizeargfunc)0, /* sq_repeat */
(ssizeargfunc)set_item, /* sq_item */
(ssizessizeargfunc)0, /* sq_slice */
(ssizeobjargproc)0, /* sq_ass_item */
(ssizessizeobjargproc)0, /* sq_ass_slice */
(objobjproc)bucket_contains, /* sq_contains */
0, /* sq_inplace_concat */
0, /* sq_inplace_repeat */
(lenfunc)set_length, /* sq_length */
(binaryfunc)0, /* sq_concat */
(ssizeargfunc)0, /* sq_repeat */
(ssizeargfunc)set_item, /* sq_item */
(ssizessizeargfunc)0, /* sq_slice */
(ssizeobjargproc)0, /* sq_ass_item */
(ssizessizeobjargproc)0, /* sq_ass_slice */
(objobjproc)bucket_contains, /* sq_contains */
0, /* sq_inplace_concat */
0, /* sq_inplace_repeat */
};
static PyTypeObject SetType = {
PyObject_HEAD_INIT(NULL) /* PyPersist_Type */
0, /* ob_size */
MODULE_NAME MOD_NAME_PREFIX "Set", /* tp_name */
sizeof(Bucket), /* tp_basicsize */
0, /* tp_itemsize */
(destructor)bucket_dealloc, /* tp_dealloc */
0, /* tp_print */
0, /* tp_getattr */
0, /* tp_setattr */
0, /* tp_compare */
(reprfunc)set_repr, /* tp_repr */
0, /* tp_as_number */
&set_as_sequence, /* tp_as_sequence */
0, /* tp_as_mapping */
0, /* tp_hash */
0, /* tp_call */
0, /* tp_str */
0, /* tp_getattro */
0, /* tp_setattro */
0, /* tp_as_buffer */
Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC |
Py_TPFLAGS_BASETYPE, /* tp_flags */
0, /* tp_doc */
(traverseproc)bucket_traverse, /* tp_traverse */
(inquiry)bucket_tp_clear, /* tp_clear */
0, /* tp_richcompare */
0, /* tp_weaklistoffset */
(getiterfunc)Bucket_getiter, /* tp_iter */
0, /* tp_iternext */
Set_methods, /* tp_methods */
Bucket_members, /* tp_members */
0, /* tp_getset */
0, /* tp_base */
0, /* tp_dict */
0, /* tp_descr_get */
0, /* tp_descr_set */
0, /* tp_dictoffset */
Set_init, /* tp_init */
0, /* tp_alloc */
0, /*PyType_GenericNew,*/ /* tp_new */
PyVarObject_HEAD_INIT(NULL, 0) /* PyPersist_Type */
MODULE_NAME MOD_NAME_PREFIX "Set", /* tp_name */
sizeof(Bucket), /* tp_basicsize */
0, /* tp_itemsize */
(destructor)bucket_dealloc, /* tp_dealloc */
0, /* tp_print */
0, /* tp_getattr */
0, /* tp_setattr */
0, /* tp_compare */
(reprfunc)set_repr, /* tp_repr */
0, /* tp_as_number */
&set_as_sequence, /* tp_as_sequence */
0, /* tp_as_mapping */
0, /* tp_hash */
0, /* tp_call */
0, /* tp_str */
0, /* tp_getattro */
0, /* tp_setattro */
0, /* tp_as_buffer */
Py_TPFLAGS_DEFAULT |
Py_TPFLAGS_HAVE_GC |
Py_TPFLAGS_BASETYPE, /* tp_flags */
0, /* tp_doc */
(traverseproc)bucket_traverse, /* tp_traverse */
(inquiry)bucket_tp_clear, /* tp_clear */
0, /* tp_richcompare */
0, /* tp_weaklistoffset */
(getiterfunc)Bucket_getiter, /* tp_iter */
0, /* tp_iternext */
Set_methods, /* tp_methods */
Bucket_members, /* tp_members */
0, /* tp_getset */
0, /* tp_base */
0, /* tp_dict */
0, /* tp_descr_get */
0, /* tp_descr_set */
0, /* tp_dictoffset */
Set_init, /* tp_init */
0, /* tp_alloc */
0, /*PyType_GenericNew,*/ /* tp_new */
};
static int
nextSet(SetIteration *i)
{
if (i->position >= 0)
if (i->position >= 0)
{
UNLESS(PER_USE(BUCKET(i->set))) return -1;
UNLESS(PER_USE(BUCKET(i->set)))
return -1;
if (i->position)
if (i->position)
{
DECREF_KEY(i->key);
}
if (i->position < BUCKET(i->set)->len)
if (i->position < BUCKET(i->set)->len)
{
COPY_KEY(i->key, BUCKET(i->set)->keys[i->position]);
INCREF_KEY(i->key);
i->position ++;
}
else
else
{
i->position = -1;
PER_ACCESSED(BUCKET(i->set));
......
......@@ -11,6 +11,7 @@
FOR A PARTICULAR PURPOSE
****************************************************************************/
#include "_compat.h"
#define TREESETTEMPLATE_C "$Id$\n"
......@@ -21,11 +22,11 @@ TreeSet_insert(BTree *self, PyObject *args)
int i;
if (!PyArg_ParseTuple(args, "O:insert", &key))
return NULL;
return NULL;
i = _BTree_set(self, key, Py_None, 1, 1);
if (i < 0)
return NULL;
return PyInt_FromLong(i);
return NULL;
return INT_FROM_LONG(i);
}
/* _Set_update and _TreeSet_update are identical except for the
......@@ -40,28 +41,30 @@ _TreeSet_update(BTree *self, PyObject *seq)
iter = PyObject_GetIter(seq);
if (iter == NULL)
return -1;
while (1) {
v = PyIter_Next(iter);
if (v == NULL) {
if (PyErr_Occurred())
goto err;
else
break;
}
ind = _BTree_set(self, v, Py_None, 1, 1);
Py_DECREF(v);
if (ind < 0)
goto err;
else
n += ind;
return -1;
while (1)
{
v = PyIter_Next(iter);
if (v == NULL)
{
if (PyErr_Occurred())
goto err;
else
break;
}
ind = _BTree_set(self, v, Py_None, 1, 1);
Py_DECREF(v);
if (ind < 0)
goto err;
else
n += ind;
}
err:
err:
Py_DECREF(iter);
if (ind < 0)
return -1;
return -1;
return n;
}
......@@ -72,116 +75,124 @@ TreeSet_update(BTree *self, PyObject *args)
int n = 0;
if (!PyArg_ParseTuple(args, "|O:update", &seq))
return NULL;
return NULL;
if (seq) {
n = _TreeSet_update(self, seq);
if (n < 0)
return NULL;
if (seq)
{
n = _TreeSet_update(self, seq);
if (n < 0)
return NULL;
}
return PyInt_FromLong(n);
return INT_FROM_LONG(n);
}
static PyObject *
TreeSet_remove(BTree *self, PyObject *args)
{
PyObject *key;
PyObject *key;
UNLESS (PyArg_ParseTuple(args, "O", &key)) return NULL;
if (_BTree_set(self, key, NULL, 0, 1) < 0) return NULL;
Py_INCREF(Py_None);
return Py_None;
UNLESS (PyArg_ParseTuple(args, "O", &key))
return NULL;
if (_BTree_set(self, key, NULL, 0, 1) < 0)
return NULL;
Py_INCREF(Py_None);
return Py_None;
}
static PyObject *
TreeSet_setstate(BTree *self, PyObject *args)
{
int r;
int r;
if (!PyArg_ParseTuple(args,"O",&args)) return NULL;
if (!PyArg_ParseTuple(args,"O",&args))
return NULL;
PER_PREVENT_DEACTIVATION(self);
r=_BTree_setstate(self, args, 1);
PER_UNUSE(self);
PER_PREVENT_DEACTIVATION(self);
r=_BTree_setstate(self, args, 1);
PER_UNUSE(self);
if (r < 0) return NULL;
Py_INCREF(Py_None);
return Py_None;
if (r < 0)
return NULL;
Py_INCREF(Py_None);
return Py_None;
}
static struct PyMethodDef TreeSet_methods[] = {
{"__getstate__", (PyCFunction) BTree_getstate, METH_NOARGS,
"__getstate__() -> state\n\n"
"Return the picklable state of the TreeSet."},
static struct PyMethodDef TreeSet_methods[] =
{
{"__getstate__", (PyCFunction) BTree_getstate, METH_NOARGS,
"__getstate__() -> state\n\n"
"Return the picklable state of the TreeSet."},
{"__setstate__", (PyCFunction) TreeSet_setstate, METH_VARARGS,
"__setstate__(state)\n\n"
"Set the state of the TreeSet."},
{"__setstate__", (PyCFunction) TreeSet_setstate, METH_VARARGS,
"__setstate__(state)\n\n"
"Set the state of the TreeSet."},
{"has_key", (PyCFunction) BTree_has_key, METH_O,
"has_key(key)\n\n"
"Return true if the TreeSet contains the given key."},
{"has_key", (PyCFunction) BTree_has_key, METH_O,
"has_key(key)\n\n"
"Return true if the TreeSet contains the given key."},
{"keys", (PyCFunction) BTree_keys, METH_KEYWORDS,
"keys([min, max]) -> list of keys\n\n"
"Returns the keys of the TreeSet. If min and max are supplied, only\n"
"keys greater than min and less than max are returned."},
{"keys", (PyCFunction) BTree_keys, METH_VARARGS | METH_KEYWORDS,
"keys([min, max]) -> list of keys\n\n"
"Returns the keys of the TreeSet. If min and max are supplied, only\n"
"keys greater than min and less than max are returned."},
{"maxKey", (PyCFunction) BTree_maxKey, METH_VARARGS,
"maxKey([max]) -> key\n\n"
"Return the largest key in the BTree. If max is specified, return\n"
"the largest key <= max."},
{"maxKey", (PyCFunction) BTree_maxKey, METH_VARARGS,
"maxKey([max]) -> key\n\n"
"Return the largest key in the BTree. If max is specified, return\n"
"the largest key <= max."},
{"minKey", (PyCFunction) BTree_minKey, METH_VARARGS,
"minKey([mi]) -> key\n\n"
"Return the smallest key in the BTree. If min is specified, return\n"
"the smallest key >= min."},
{"minKey", (PyCFunction) BTree_minKey, METH_VARARGS,
"minKey([mi]) -> key\n\n"
"Return the smallest key in the BTree. If min is specified, return\n"
"the smallest key >= min."},
{"clear", (PyCFunction) BTree_clear, METH_NOARGS,
"clear()\n\nRemove all of the items from the BTree."},
{"clear", (PyCFunction) BTree_clear, METH_NOARGS,
"clear()\n\nRemove all of the items from the BTree."},
{"add", (PyCFunction)TreeSet_insert, METH_VARARGS,
"add(id) -- Add an item to the set"},
{"add", (PyCFunction)TreeSet_insert, METH_VARARGS,
"add(id) -- Add an item to the set"},
{"insert", (PyCFunction)TreeSet_insert, METH_VARARGS,
"insert(id) -- Add an item to the set"},
{"insert", (PyCFunction)TreeSet_insert, METH_VARARGS,
"insert(id) -- Add an item to the set"},
{"update", (PyCFunction)TreeSet_update, METH_VARARGS,
"update(collection)\n\n Add the items from the given collection."},
{"update", (PyCFunction)TreeSet_update, METH_VARARGS,
"update(collection)\n\n Add the items from the given collection."},
{"remove", (PyCFunction)TreeSet_remove, METH_VARARGS,
"remove(id) -- Remove a key from the set"},
{"remove", (PyCFunction)TreeSet_remove, METH_VARARGS,
"remove(id) -- Remove a key from the set"},
{"_check", (PyCFunction) BTree_check, METH_NOARGS,
"Perform sanity check on TreeSet, and raise exception if flawed."},
{"_check", (PyCFunction) BTree_check, METH_NOARGS,
"Perform sanity check on TreeSet, and raise exception if flawed."},
#ifdef PERSISTENT
{"_p_resolveConflict", (PyCFunction) BTree__p_resolveConflict, METH_VARARGS,
"_p_resolveConflict() -- Reinitialize from a newly created copy"},
{"_p_resolveConflict",
(PyCFunction) BTree__p_resolveConflict, METH_VARARGS,
"_p_resolveConflict() -- Reinitialize from a newly created copy"},
{"_p_deactivate", (PyCFunction) BTree__p_deactivate, METH_KEYWORDS,
"_p_deactivate()\n\nReinitialize from a newly created copy."},
{"_p_deactivate",
(PyCFunction) BTree__p_deactivate, METH_VARARGS | METH_KEYWORDS,
"_p_deactivate()\n\nReinitialize from a newly created copy."},
#endif
{NULL, NULL} /* sentinel */
{NULL, NULL} /* sentinel */
};
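A short Python sketch of the range-search methods registered above, assuming the IITreeSet flavor generated from this template:

from BTrees.IIBTree import IITreeSet

ts = IITreeSet()
ts.update(range(10))
print(list(ts.keys(3, 7)))   # keys(min, max) is inclusive: [3, 4, 5, 6, 7]
print(ts.minKey(4))          # smallest key >= 4  -> 4
print(ts.maxKey(6))          # largest key <= 6   -> 6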
static PyMappingMethods TreeSet_as_mapping = {
(lenfunc)BTree_length, /*mp_length*/
(lenfunc)BTree_length, /*mp_length*/
};
static PySequenceMethods TreeSet_as_sequence = {
(lenfunc)0, /* sq_length */
(binaryfunc)0, /* sq_concat */
(ssizeargfunc)0, /* sq_repeat */
(ssizeargfunc)0, /* sq_item */
(ssizessizeargfunc)0, /* sq_slice */
(ssizeobjargproc)0, /* sq_ass_item */
(ssizessizeobjargproc)0, /* sq_ass_slice */
(objobjproc)BTree_contains, /* sq_contains */
0, /* sq_inplace_concat */
0, /* sq_inplace_repeat */
(lenfunc)0, /* sq_length */
(binaryfunc)0, /* sq_concat */
(ssizeargfunc)0, /* sq_repeat */
(ssizeargfunc)0, /* sq_item */
(ssizessizeargfunc)0, /* sq_slice */
(ssizeobjargproc)0, /* sq_ass_item */
(ssizessizeobjargproc)0, /* sq_ass_slice */
(objobjproc)BTree_contains, /* sq_contains */
0, /* sq_inplace_concat */
0, /* sq_inplace_repeat */
};
static int
......@@ -190,53 +201,54 @@ TreeSet_init(PyObject *self, PyObject *args, PyObject *kwds)
PyObject *v = NULL;
if (!PyArg_ParseTuple(args, "|O:" MOD_NAME_PREFIX "TreeSet", &v))
return -1;
return -1;
if (v)
return _TreeSet_update((BTree *)self, v);
return _TreeSet_update((BTree *)self, v);
else
return 0;
return 0;
}
static PyTypeObject TreeSetType = {
PyObject_HEAD_INIT(NULL) /* PyPersist_Type */
0, /* ob_size */
MODULE_NAME MOD_NAME_PREFIX "TreeSet",/* tp_name */
sizeof(BTree), /* tp_basicsize */
0, /* tp_itemsize */
(destructor)BTree_dealloc, /* tp_dealloc */
0, /* tp_print */
0, /* tp_getattr */
0, /* tp_setattr */
0, /* tp_compare */
0, /* tp_repr */
&BTree_as_number_for_nonzero, /* tp_as_number */
&TreeSet_as_sequence, /* tp_as_sequence */
&TreeSet_as_mapping, /* tp_as_mapping */
0, /* tp_hash */
0, /* tp_call */
0, /* tp_str */
0, /* tp_getattro */
0, /* tp_setattro */
0, /* tp_as_buffer */
Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC |
Py_TPFLAGS_BASETYPE, /* tp_flags */
0, /* tp_doc */
(traverseproc)BTree_traverse, /* tp_traverse */
(inquiry)BTree_tp_clear, /* tp_clear */
0, /* tp_richcompare */
0, /* tp_weaklistoffset */
(getiterfunc)BTree_getiter, /* tp_iter */
0, /* tp_iternext */
TreeSet_methods, /* tp_methods */
BTree_members, /* tp_members */
0, /* tp_getset */
0, /* tp_base */
0, /* tp_dict */
0, /* tp_descr_get */
0, /* tp_descr_set */
0, /* tp_dictoffset */
TreeSet_init, /* tp_init */
0, /* tp_alloc */
0, /*PyType_GenericNew,*/ /* tp_new */
static PyTypeObject TreeSetType =
{
PyVarObject_HEAD_INIT(NULL, 0)
MODULE_NAME MOD_NAME_PREFIX "TreeSet", /* tp_name */
sizeof(BTree), /* tp_basicsize */
0, /* tp_itemsize */
(destructor)BTree_dealloc, /* tp_dealloc */
0, /* tp_print */
0, /* tp_getattr */
0, /* tp_setattr */
0, /* tp_compare */
0, /* tp_repr */
&BTree_as_number_for_nonzero, /* tp_as_number */
&TreeSet_as_sequence, /* tp_as_sequence */
&TreeSet_as_mapping, /* tp_as_mapping */
0, /* tp_hash */
0, /* tp_call */
0, /* tp_str */
0, /* tp_getattro */
0, /* tp_setattro */
0, /* tp_as_buffer */
Py_TPFLAGS_DEFAULT |
Py_TPFLAGS_HAVE_GC |
Py_TPFLAGS_BASETYPE, /* tp_flags */
0, /* tp_doc */
(traverseproc)BTree_traverse, /* tp_traverse */
(inquiry)BTree_tp_clear, /* tp_clear */
0, /* tp_richcompare */
0, /* tp_weaklistoffset */
(getiterfunc)BTree_getiter, /* tp_iter */
0, /* tp_iternext */
TreeSet_methods, /* tp_methods */
BTree_members, /* tp_members */
0, /* tp_getset */
0, /* tp_base */
0, /* tp_dict */
0, /* tp_descr_get */
0, /* tp_descr_set */
0, /* tp_dictoffset */
TreeSet_init, /* tp_init */
0, /* tp_alloc */
0, /*PyType_GenericNew,*/ /* tp_new */
};
......@@ -25,10 +25,17 @@
#define PERSISTENT
#define MOD_NAME_PREFIX "IF"
#define INITMODULE init_IFBTree
#define DEFAULT_MAX_BUCKET_SIZE 120
#define DEFAULT_MAX_BTREE_SIZE 500
#include "_compat.h"
#include "intkeymacros.h"
#include "floatvaluemacros.h"
#ifdef PY3K
#define INITMODULE PyInit__IFBTree
#else
#define INITMODULE init_IFBTree
#endif
#include "BTreeModuleTemplate.c"
......@@ -25,10 +25,17 @@
#define PERSISTENT
#define MOD_NAME_PREFIX "II"
#define INITMODULE init_IIBTree
#define DEFAULT_MAX_BUCKET_SIZE 120
#define DEFAULT_MAX_BTREE_SIZE 500
#include "_compat.h"
#include "intkeymacros.h"
#include "intvaluemacros.h"
#ifdef PY3K
#define INITMODULE PyInit__IIBTree
#else
#define INITMODULE init_IIBTree
#endif
#include "BTreeModuleTemplate.c"
......@@ -23,10 +23,17 @@
#define PERSISTENT
#define MOD_NAME_PREFIX "IO"
#define DEFAULT_MAX_BUCKET_SIZE 60
#define DEFAULT_MAX_BTREE_SIZE 500
#define INITMODULE init_IOBTree
#include "_compat.h"
#include "intkeymacros.h"
#include "objectvaluemacros.h"
#ifdef PY3K
#define INITMODULE PyInit__IOBTree
#else
#define INITMODULE init_IOBTree
#endif
#include "BTreeModuleTemplate.c"
......@@ -25,12 +25,19 @@
#define PERSISTENT
#define MOD_NAME_PREFIX "LF"
#define INITMODULE init_LFBTree
#define DEFAULT_MAX_BUCKET_SIZE 120
#define DEFAULT_MAX_BTREE_SIZE 500
#define ZODB_64BIT_INTS
#include "_compat.h"
#include "intkeymacros.h"
#include "floatvaluemacros.h"
#ifdef PY3K
#define INITMODULE PyInit__LFBTree
#else
#define INITMODULE init_LFBTree
#endif
#include "BTreeModuleTemplate.c"
......@@ -25,12 +25,19 @@
#define PERSISTENT
#define MOD_NAME_PREFIX "LL"
#define INITMODULE init_LLBTree
#define DEFAULT_MAX_BUCKET_SIZE 120
#define DEFAULT_MAX_BTREE_SIZE 500
#define ZODB_64BIT_INTS
#include "_compat.h"
#include "intkeymacros.h"
#include "intvaluemacros.h"
#ifdef PY3K
#define INITMODULE PyInit__LLBTree
#else
#define INITMODULE init_LLBTree
#endif
#include "BTreeModuleTemplate.c"
......@@ -23,12 +23,19 @@
#define PERSISTENT
#define MOD_NAME_PREFIX "LO"
#define DEFAULT_MAX_BUCKET_SIZE 60
#define DEFAULT_MAX_BTREE_SIZE 500
#define INITMODULE init_LOBTree
#define ZODB_64BIT_INTS
#include "_compat.h"
#include "intkeymacros.h"
#include "objectvaluemacros.h"
#ifdef PY3K
#define INITMODULE PyInit__LOBTree
#else
#define INITMODULE init_LOBTree
#endif
#include "BTreeModuleTemplate.c"
......@@ -23,10 +23,17 @@
#define PERSISTENT
#define MOD_NAME_PREFIX "OI"
#define INITMODULE init_OIBTree
#define DEFAULT_MAX_BUCKET_SIZE 60
#define DEFAULT_MAX_BTREE_SIZE 250
#include "_compat.h"
#include "objectkeymacros.h"
#include "intvaluemacros.h"
#ifdef PY3K
#define INITMODULE PyInit__OIBTree
#else
#define INITMODULE init_OIBTree
#endif
#include "BTreeModuleTemplate.c"
......@@ -23,12 +23,19 @@
#define PERSISTENT
#define MOD_NAME_PREFIX "OL"
#define INITMODULE init_OLBTree
#define DEFAULT_MAX_BUCKET_SIZE 60
#define DEFAULT_MAX_BTREE_SIZE 250
#define ZODB_64BIT_INTS
#include "_compat.h"
#include "objectkeymacros.h"
#include "intvaluemacros.h"
#ifdef PY3K
#define INITMODULE PyInit__OLBTree
#else
#define INITMODULE init_OLBTree
#endif
#include "BTreeModuleTemplate.c"
......@@ -23,10 +23,17 @@
#define PERSISTENT
#define MOD_NAME_PREFIX "OO"
#define INITMODULE init_OOBTree
#define DEFAULT_MAX_BUCKET_SIZE 30
#define DEFAULT_MAX_BTREE_SIZE 250
#include "_compat.h"
#include "objectkeymacros.h"
#include "objectvaluemacros.h"
#ifdef PY3K
#define INITMODULE PyInit__OOBTree
#else
#define INITMODULE init_OOBTree
#endif
#include "BTreeModuleTemplate.c"
......@@ -21,6 +21,10 @@ from struct import error as struct_error
from persistent import Persistent
from .Interfaces import BTreesConflictError
from ._compat import PY3
from ._compat import cmp
from ._compat import int_types
from ._compat import xrange
_marker = object()
......@@ -156,7 +160,7 @@ class _SetIteration(object):
__slots__ = ('to_iterate',
'useValues',
'_next',
'_iter',
'active',
'position',
'key',
......@@ -172,15 +176,18 @@ class _SetIteration(object):
try:
itmeth = to_iterate.iteritems
except AttributeError:
itmeth = to_iterate.__iter__
useValues = False
if PY3 and isinstance(to_iterate, dict): #pragma NO COVER Py3k
itmeth = to_iterate.items().__iter__
else:
itmeth = to_iterate.__iter__
useValues = False
else:
self.value = None
else:
itmeth = to_iterate.__iter__
self.useValues = useValues
self._next = itmeth().next
self._iter = itmeth()
self.active = True
self.position = 0
self.key = _marker
......@@ -190,9 +197,9 @@ class _SetIteration(object):
def advance(self):
try:
if self.useValues:
self.key, self.value = self._next()
self.key, self.value = next(self._iter)
else:
self.key = self._next()
self.key = next(self._iter)
self.position += 1
except StopIteration:
self.active = False
......@@ -200,6 +207,15 @@ class _SetIteration(object):
return self
def _no_default_comparison(key):
# Enforce test that key has non-default comparison.
lt = getattr(key, '__lt__', None)
if lt is not None:
if getattr(lt, '__objclass__', None) is object: #pragma NO COVER Py3k
lt = None
if (lt is None and
getattr(key, '__cmp__', None) is None):
raise TypeError("Can't use default __cmp__")
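A small sketch of what the helper above accepts and rejects on Python 3, where every class inherits object.__lt__; Ordered and NoOrder are throwaway examples:

class NoOrder(object):
    pass

class Ordered(object):
    def __init__(self, n):
        self.n = n
    def __lt__(self, other):
        return self.n < other.n

_no_default_comparison(Ordered(1))     # defines its own __lt__: accepted
_no_default_comparison('some-key')     # str has a real __lt__: accepted
try:
    _no_default_comparison(NoOrder())  # only object's default slot wrapper
except TypeError as err:
    print(err)                         # Can't use default __cmp__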
class Bucket(_BucketBase):
......@@ -237,10 +253,7 @@ class Bucket(_BucketBase):
raise TypeError('items must be a sequence of 2-tuples')
def __setitem__(self, key, value):
# Enforce test that key has non-default comparison.
if ( getattr(key, '__lt__', None) is None and
getattr(key, '__cmp__', None) is None):
raise TypeError("Can't use default __cmp__")
_no_default_comparison(key)
self._set(self._to_key(key), self._to_value(value))
def __delitem__(self, key):
......@@ -298,7 +311,7 @@ class Bucket(_BucketBase):
def _split(self, index=-1):
if index < 0 or index >= len(self._keys):
index = len(self._keys) / 2
index = len(self._keys) // 2
new_instance = self.__class__()
new_instance._keys = self._keys[index:]
new_instance._values = self._values[index:]
......@@ -553,7 +566,7 @@ class Set(_BucketBase):
def _split(self, index=-1):
if index < 0 or index >= len(self._keys):
index = len(self._keys) / 2
index = len(self._keys) // 2
new_instance = self.__class__()
new_instance._keys = self._keys[index:]
del self._keys[index:]
......@@ -727,10 +740,7 @@ class _Tree(_Base):
set(*i)
def __setitem__(self, key, value):
# Enforce test that key has non-default comparison.
if ( getattr(key, '__lt__', None) is None and
getattr(key, '__cmp__', None) is None):
raise TypeError("Can't use default __cmp__")
_no_default_comparison(key)
self._set(self._to_key(key), self._to_value(value))
def __delitem__(self, key):
......@@ -742,6 +752,7 @@ class _Tree(_Base):
def __nonzero__(self):
return bool(self._data)
__bool__ = __nonzero__ #Py3k rename
def __len__(self):
l = 0
......@@ -760,7 +771,7 @@ class _Tree(_Base):
if data:
lo = 0
hi = len(data)
i = hi//2
i = hi // 2
while i > lo:
cmp_ = cmp(data[i].key, key)
if cmp_ < 0:
......@@ -769,7 +780,7 @@ class _Tree(_Base):
hi = i
else:
break
i = (lo+hi)//2
i = (lo + hi) // 2
return i
return -1
......@@ -882,7 +893,7 @@ class _Tree(_Base):
def _split(self, index=None):
data = self._data
if index is None:
index = len(data)//2
index = len(data) // 2
next = self.__class__()
next._data = data[index:]
......@@ -1083,7 +1094,7 @@ class _TreeItems(object):
while i > self.index:
try:
self.v = self.it.next()
self.v = next(self.it)
except StopIteration:
raise IndexError(i)
else:
......@@ -1158,7 +1169,6 @@ class TreeSet(_Tree):
__slots__ = ()
#_next = None
def add(self, key):
return self._set(self._to_key(key))[0]
......@@ -1270,6 +1280,14 @@ def intersection(set_type, o1, o2):
i2.advance()
return result
def _prepMergeIterators(o1, o2):
MERGE_DEFAULT = getattr(o1, 'MERGE_DEFAULT', None)
if MERGE_DEFAULT is None:
raise TypeError("invalid set operation")
i1 = _SetIteration(o1, True, MERGE_DEFAULT)
i2 = _SetIteration(o2, True, MERGE_DEFAULT)
return i1, i2
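A rough illustration of the guard in the helper above, using a throwaway stand-in rather than a real BTrees flavor; only objects that advertise MERGE_DEFAULT may take part in the weighted operations:

class _FakeMapping(dict):
    MERGE_DEFAULT = 1                  # marks the flavor as merge-capable

i1, i2 = _prepMergeIterators(_FakeMapping(a=1), _FakeMapping(b=2))
print(i1.active and i2.active)         # True: both wrapped as _SetIteration

try:
    _prepMergeIterators({}, {})        # plain dict has no MERGE_DEFAULT
except TypeError as err:
    print(err)                         # invalid set operation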
def weightedUnion(set_type, o1, o2, w1=1, w2=1):
if o1 is None:
if o2 is None:
......@@ -1277,9 +1295,7 @@ def weightedUnion(set_type, o1, o2, w1=1, w2=1):
return w2, o2
if o2 is None:
return w1, o1
MERGE_DEFAULT = getattr(o1, 'MERGE_DEFAULT', None)
i1 = _SetIteration(o1, True, MERGE_DEFAULT)
i2 = _SetIteration(o2, True, MERGE_DEFAULT)
i1, i2 = _prepMergeIterators(o1, o2)
MERGE = getattr(o1, 'MERGE', None)
if MERGE is None and i1.useValues and i2.useValues:
raise TypeError("invalid set operation")
......@@ -1287,12 +1303,6 @@ def weightedUnion(set_type, o1, o2, w1=1, w2=1):
if (not i1.useValues) and i2.useValues:
i1, i2 = i2, i1
w1, w2 = w2, w1
if MERGE_DEFAULT is None:
if i1.useValues:
if (not i2.useValues):
raise TypeError("invalid set operation")
else:
raise TypeError("invalid set operation")
_merging = i1.useValues or i2.useValues
if _merging:
result = o1._mapping_type()
......@@ -1333,22 +1343,13 @@ def weightedIntersection(set_type, o1, o2, w1=1, w2=1):
return w2, o2
if o2 is None:
return w1, o1
MERGE_DEFAULT = getattr(o1, 'MERGE_DEFAULT', None)
i1 = _SetIteration(o1, True, MERGE_DEFAULT)
i2 = _SetIteration(o2, True, MERGE_DEFAULT)
i1, i2 = _prepMergeIterators(o1, o2)
MERGE = getattr(o1, 'MERGE', None)
if MERGE is None and i1.useValues and i2.useValues:
raise TypeError("invalid set operation")
MERGE_WEIGHT = getattr(o1, 'MERGE_WEIGHT')
if (not i1.useValues) and i2.useValues:
i1, i2 = i2, i1
w1, w2 = w2, w1
if MERGE_DEFAULT is None:
if i1.useValues:
if (not i2.useValues):
raise TypeError("invalid set operation")
else:
raise TypeError("invalid set operation")
_merging = i1.useValues or i2.useValues
if _merging:
result = o1._mapping_type()
......@@ -1384,7 +1385,6 @@ def multiunion(set_type, seqs):
def to_ob(self, v):
return v
int_types = int, long
def to_int(self, v):
try:
# XXX Python 2.6 doesn't truncate, it spews a warning.
......@@ -1420,10 +1420,10 @@ def to_long(self, v):
return int(v)
def to_str(l):
def to_bytes(l):
def to(self, v):
if not (isinstance(v, str) and len(v) == l):
raise TypeError("%s-character string expected" % l)
if not (isinstance(v, bytes) and len(v) == l):
raise TypeError("%s-byte array expected" % l)
return v
return to
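A quick sketch of the converter factory above, wired in the way the fsBTree flavor uses it for its fixed-width keys; _Demo is a throwaway stand-in:

class _Demo(object):
    _to_key = to_bytes(2)              # 2-byte keys, as fsBTree uses them

demo = _Demo()
print(demo._to_key(b'ab'))             # b'ab' passes the type/length check
try:
    demo._to_key(b'abc')               # wrong length (or non-bytes) is rejected
except TypeError as err:
    print(err)                         # 2-byte array expected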
......
/* Straddle Python 2 / 3 */
#ifndef BTREES__COMPAT_H
#define BTREES__COMPAT_H
#include "Python.h"
#ifdef INTERN
#undef INTERN
#endif
#ifdef INT_FROM_LONG
#undef INT_FROM_LONG
#endif
#ifdef INT_CHECK
#undef INT_CHECK
#endif
#if PY_MAJOR_VERSION >= 3
#define PY3K
#define INTERN PyUnicode_InternFromString
#define INT_FROM_LONG(x) PyLong_FromLong(x)
#define INT_CHECK(x) PyLong_Check(x)
#define INT_AS_LONG(x) PyLong_AS_LONG(x)
#define INT_GETMAX(x) 2<<31
#define TEXT_FROM_STRING PyUnicode_FromString
#define TEXT_FORMAT PyUnicode_Format
#define COMPARE(lhs, rhs) \
PyObject_RichCompareBool((lhs), (rhs), Py_LT) > 0 ? -1 : \
(PyObject_RichCompareBool((lhs), (rhs), Py_EQ) > 0 ? 0 : 1)
#else
#define INTERN PyString_InternFromString
#define INT_FROM_LONG(x) PyInt_FromLong(x)
#define INT_CHECK(x) PyInt_Check(x)
#define INT_AS_LONG(x) PyInt_AS_LONG(x)
#define INT_GETMAX(x) PyInt_GetMax(x)
#define TEXT_FROM_STRING PyString_FromString
#define TEXT_FORMAT PyString_Format
#define COMPARE(lhs, rhs) PyObject_Compare((lhs), (rhs))
#endif
#endif /* BTREES__COMPAT_H */
##############################################################################
#
# Copyright (c) 2001-2012 Zope Foundation and Contributors.
# All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE
#
##############################################################################
import sys
if sys.version_info[0] < 3: #pragma NO COVER Python2
PY2 = True
PY3 = False
from StringIO import StringIO
BytesIO = StringIO
int_types = int, long
xrange = xrange
cmp = cmp
_bytes = str
def _ascii(x):
return bytes(x)
def _u(s, encoding='unicode_escape'):
return unicode(s, encoding)
else: #pragma NO COVER Python3
PY2 = False
PY3 = True
from io import StringIO
from io import BytesIO
int_types = int,
xrange = range
def cmp(x, y):
return (x > y) - (y > x)
_bytes = bytes
def _ascii(x):
return bytes(x, 'ascii')
def _u(s, encoding=None):
if encoding is None:
return s
return str(s, encoding)
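A tiny sketch of how these shims behave once imported; cmp, int_types, xrange and _ascii give a single spelling that works on both major versions:

print([cmp(1, 2), cmp(2, 2), cmp(3, 2)])   # [-1, 0, 1] on either Python
print(isinstance(10 ** 30, int_types))     # True: long on Py2, int on Py3
print(list(xrange(3)))                     # [0, 1, 2]
print(_ascii('ab'))                        # b'ab' (a plain str on Python 2)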
......@@ -31,23 +31,24 @@ typedef unsigned char char6[6];
#define PERSISTENT
#define MOD_NAME_PREFIX "fs"
#define INITMODULE init_fsBTree
#define DEFAULT_MAX_BUCKET_SIZE 500
#define DEFAULT_MAX_BTREE_SIZE 500
#include "_compat.h"
/*#include "intkeymacros.h"*/
#define KEYMACROS_H "$Id$\n"
#define KEY_TYPE char2
#undef KEY_TYPE_IS_PYOBJECT
#define KEY_CHECK(K) (PyString_Check(K) && PyString_GET_SIZE(K)==2)
#define KEY_CHECK(K) (PyBytes_Check(K) && PyBytes_GET_SIZE(K)==2)
#define TEST_KEY_SET_OR(V, K, T) if ( ( (V) = ((*(K) < *(T) || (*(K) == *(T) && (K)[1] < (T)[1])) ? -1 : ((*(K) == *(T) && (K)[1] == (T)[1]) ? 0 : 1)) ), 0 )
#define DECREF_KEY(KEY)
#define INCREF_KEY(k)
#define COPY_KEY(KEY, E) (*(KEY)=*(E), (KEY)[1]=(E)[1])
#define COPY_KEY_TO_OBJECT(O, K) O=PyString_FromStringAndSize((const char*)K,2)
#define COPY_KEY_TO_OBJECT(O, K) O=PyBytes_FromStringAndSize((const char*)K,2)
#define COPY_KEY_FROM_ARG(TARGET, ARG, STATUS) \
if (KEY_CHECK(ARG)) memcpy(TARGET, PyString_AS_STRING(ARG), 2); else { \
if (KEY_CHECK(ARG)) memcpy(TARGET, PyBytes_AS_STRING(ARG), 2); else { \
PyErr_SetString(PyExc_TypeError, "expected two-character string key"); \
(STATUS)=0; }
......@@ -59,10 +60,10 @@ typedef unsigned char char6[6];
#define DECREF_VALUE(k)
#define INCREF_VALUE(k)
#define COPY_VALUE(V, E) (memcpy(V, E, 6))
#define COPY_VALUE_TO_OBJECT(O, K) O=PyString_FromStringAndSize((const char*)K,6)
#define COPY_VALUE_TO_OBJECT(O, K) O=PyBytes_FromStringAndSize((const char*)K,6)
#define COPY_VALUE_FROM_ARG(TARGET, ARG, STATUS) \
if ((PyString_Check(ARG) && PyString_GET_SIZE(ARG)==6)) \
memcpy(TARGET, PyString_AS_STRING(ARG), 6); else { \
if ((PyBytes_Check(ARG) && PyBytes_GET_SIZE(ARG)==6)) \
memcpy(TARGET, PyBytes_AS_STRING(ARG), 6); else { \
PyErr_SetString(PyExc_TypeError, "expected six-character string key"); \
(STATUS)=0; }
......@@ -70,20 +71,29 @@ typedef unsigned char char6[6];
#include "Python.h"
static PyObject *bucket_toString(PyObject *self);
static PyObject *bucket_toBytes(PyObject *self);
static PyObject *bucket_fromString(PyObject *self, PyObject *state);
static PyObject *bucket_fromBytes(PyObject *self, PyObject *state);
#define EXTRA_BUCKET_METHODS \
{"toString", (PyCFunction) bucket_toString, METH_NOARGS, \
"toString() -- Return the state as a string"}, \
{"fromString", (PyCFunction) bucket_fromString, METH_O, \
"fromString(s) -- Set the state of the object from a string"}, \
{"toBytes", (PyCFunction) bucket_toBytes, METH_NOARGS, \
"toBytes() -- Return the state as a bytes array"}, \
{"fromBytes", (PyCFunction) bucket_fromBytes, METH_O, \
"fromSBytes(s) -- Set the state of the object from a bytes array"}, \
{"toString", (PyCFunction) bucket_toBytes, METH_NOARGS, \
"toString() -- Deprecated alias for 'toBytes'"}, \
{"fromString", (PyCFunction) bucket_fromBytes, METH_O, \
"fromString(s) -- Deprecated alias for 'fromBytes'"}, \
#ifdef PY3K
#define INITMODULE PyInit__fsBTree
#else
#define INITMODULE init_fsBTree
#endif
#include "BTreeModuleTemplate.c"
static PyObject *
bucket_toString(PyObject *oself)
bucket_toBytes(PyObject *oself)
{
Bucket *self = (Bucket *)oself;
PyObject *items = NULL;
......@@ -93,11 +103,11 @@ bucket_toString(PyObject *oself)
len = self->len;
items = PyString_FromStringAndSize(NULL, len*8);
items = PyBytes_FromStringAndSize(NULL, len*8);
if (items == NULL)
goto err;
memcpy(PyString_AS_STRING(items), self->keys, len*2);
memcpy(PyString_AS_STRING(items)+len*2, self->values, len*6);
memcpy(PyBytes_AS_STRING(items), self->keys, len*2);
memcpy(PyBytes_AS_STRING(items)+len*2, self->values, len*6);
PER_UNUSE(self);
return items;
......@@ -109,14 +119,14 @@ bucket_toString(PyObject *oself)
}
static PyObject *
bucket_fromString(PyObject *oself, PyObject *state)
bucket_fromBytes(PyObject *oself, PyObject *state)
{
Bucket *self = (Bucket *)oself;
int len;
KEY_TYPE *keys;
VALUE_TYPE *values;
len = PyString_Size(state);
len = PyBytes_Size(state);
if (len < 0)
return NULL;
......@@ -144,8 +154,8 @@ bucket_fromString(PyObject *oself, PyObject *state)
self->size = len;
}
memcpy(self->keys, PyString_AS_STRING(state), len*2);
memcpy(self->values, PyString_AS_STRING(state)+len*2, len*6);
memcpy(self->keys, PyBytes_AS_STRING(state), len*2);
memcpy(self->values, PyBytes_AS_STRING(state)+len*2, len*6);
self->len = len;
......
......@@ -378,30 +378,30 @@ class Printer(Walker): #pragma NO COVER
def visit_btree(self, obj, path, parent, is_mapping,
keys, kids, lo, hi):
indent = " " * len(path)
print "%s%s %s with %d children" % (
print("%s%s %s with %d children" % (
indent,
".".join(map(str, path)),
type_and_adr(obj),
len(kids))
len(kids)))
indent += " "
n = len(keys)
for i in range(n):
print "%skey %d: %r" % (indent, i, keys[i])
print("%skey %d: %r" % (indent, i, keys[i]))
def visit_bucket(self, obj, path, parent, is_mapping,
keys, values, lo, hi):
indent = " " * len(path)
print "%s%s %s with %d keys" % (
print("%s%s %s with %d keys" % (
indent,
".".join(map(str, path)),
type_and_adr(obj),
len(keys))
len(keys)))
indent += " "
n = len(keys)
for i in range(n):
print "%skey %d: %r" % (indent, i, keys[i]),
print("%skey %d: %r" % (indent, i, keys[i]),)
if is_mapping:
print "value %r" % (values[i],)
print("value %r" % (values[i],))
def check(btree):
"""Check internal value-based invariants in a BTree or TreeSet.
......
......@@ -13,7 +13,7 @@
#define COPY_VALUE_TO_OBJECT(O, K) O=PyFloat_FromDouble(K)
#define COPY_VALUE_FROM_ARG(TARGET, ARG, STATUS) \
if (PyFloat_Check(ARG)) TARGET = (float)PyFloat_AsDouble(ARG); \
else if (PyInt_Check(ARG)) TARGET = (float)PyInt_AsLong(ARG); \
else if (INT_CHECK(ARG)) TARGET = (float)INT_AS_LONG(ARG); \
else { \
PyErr_SetString(PyExc_TypeError, "expected float or int value"); \
(STATUS)=0; (TARGET)=0; }
......
......@@ -14,10 +14,11 @@
# fsBTrees are data structures used for ZODB FileStorage. They are not
# expected to be "public" except to FileStorage.
# Each item in an fsBTree maps a two-byte key to a six-byte value.
__all__ = ('Bucket', 'Set', 'BTree', 'TreeSet',
'fsBucket', 'fsSet', 'fsBTree', 'fsTreeSet',
'union', 'intersection', 'difference', 'multiunion',
'union', 'intersection', 'difference',
)
......@@ -30,16 +31,15 @@ from ._base import Tree as BTree
from ._base import TreeSet
from ._base import difference as _difference
from ._base import intersection as _intersection
from ._base import multiunion as _multiunion
from ._base import set_operation as _set_operation
from ._base import to_str as _to_str
from ._base import to_bytes as _to_bytes
from ._base import union as _union
_BUCKET_SIZE = 500
_TREE_SIZE = 500
using64bits = False
_to_key = _to_str(2)
_to_value = _to_str(6)
_to_key = _to_bytes(2)
_to_value = _to_bytes(6)
class fsBucketPy(Bucket):
......@@ -47,11 +47,8 @@ class fsBucketPy(Bucket):
_to_key = _to_key
_to_value = _to_value
def MERGE_WEIGHT(self, value, weight):
return value
def toString(self):
return ''.join(self._keys) + ''.join(self._values)
return b''.join(self._keys) + b''.join(self._values)
def fromString(self, v):
length = len(v)
......@@ -77,8 +74,6 @@ class fsBTreePy(BTree):
MAX_SIZE = _TREE_SIZE
_to_key = _to_key
_to_value = _to_value
def MERGE_WEIGHT(self, value, weight):
return value
class fsTreeSetPy(TreeSet):
......@@ -104,18 +99,10 @@ fsTreeSetPy._set_type = fsTreeSetPy._bucket_type = fsSetPy
differencePy = _set_operation(_difference, fsSetPy)
unionPy = _set_operation(_union, fsSetPy)
intersectionPy = _set_operation(_intersection, fsSetPy)
multiunionPy = _set_operation(_multiunion, fsSetPy)
try:
from _fsBTree import fsBucket
from _fsBTree import fsSet
from _fsBTree import fsBTree
from _fsBTree import fsTreeSet
from _fsBTree import difference
from _fsBTree import union
from _fsBTree import intersection
from _fsBTree import multiunion
except ImportError: #pragma NO COVER
from ._fsBTree import fsBucket
except ImportError: #pragma NO COVER w/ C extensions
fsBucket = fsBucketPy
fsSet = fsSetPy
fsBTree = fsBTreePy
......@@ -123,7 +110,13 @@ except ImportError: #pragma NO COVER
difference = differencePy
union = unionPy
intersection = intersectionPy
multiunion = multiunionPy
else: #pragma NO COVER w/o C extensions
from ._fsBTree import fsSet
from ._fsBTree import fsBTree
from ._fsBTree import fsTreeSet
from ._fsBTree import difference
from ._fsBTree import union
from ._fsBTree import intersection
Bucket = fsBucket
Set = fsSet
......
......@@ -4,26 +4,23 @@
#ifdef ZODB_64BIT_INTS
/* PY_LONG_LONG as key */
#define NEED_LONG_LONG_SUPPORT
#define NEED_LONG_LONG_KEYS
#define KEY_TYPE PY_LONG_LONG
#define KEY_CHECK longlong_check
#define COPY_KEY_TO_OBJECT(O, K) O=longlong_as_object(K)
#define COPY_KEY_FROM_ARG(TARGET, ARG, STATUS) \
if (PyInt_Check(ARG)) TARGET=PyInt_AS_LONG(ARG); else \
if (longlong_check(ARG)) TARGET=PyLong_AsLongLong(ARG); else \
if (PyLong_Check(ARG)) { \
PyErr_SetString(PyExc_ValueError, "long integer out of range"); \
(STATUS)=0; (TARGET)=0; } \
else { \
PyErr_SetString(PyExc_TypeError, "expected integer key"); \
(STATUS)=0; (TARGET)=0; }
if (!longlong_convert((ARG), &TARGET)) \
{ \
(STATUS)=0; (TARGET)=0; \
}
#else
/* C int as key */
#define KEY_TYPE int
#define KEY_CHECK PyInt_Check
#define COPY_KEY_TO_OBJECT(O, K) O=PyInt_FromLong(K)
#define KEY_CHECK INT_CHECK
#define COPY_KEY_TO_OBJECT(O, K) O=INT_FROM_LONG(K)
#define COPY_KEY_FROM_ARG(TARGET, ARG, STATUS) \
if (PyInt_Check(ARG)) { \
long vcopy = PyInt_AS_LONG(ARG); \
if (INT_CHECK(ARG)) { \
long vcopy = INT_AS_LONG(ARG); \
if ((int)vcopy != vcopy) { \
PyErr_SetString(PyExc_TypeError, "integer out of range"); \
(STATUS)=0; (TARGET)=0; \
......
......@@ -7,7 +7,7 @@
#define VALUE_PARSE "L"
#define COPY_VALUE_TO_OBJECT(O, K) O=longlong_as_object(K)
#define COPY_VALUE_FROM_ARG(TARGET, ARG, STATUS) \
if (PyInt_Check(ARG)) TARGET=PyInt_AS_LONG(ARG); else \
if (INT_CHECK(ARG)) TARGET=INT_AS_LONG(ARG); else \
if (longlong_check(ARG)) TARGET=PyLong_AsLongLong(ARG); else \
if (PyLong_Check(ARG)) { \
PyErr_SetString(PyExc_ValueError, "long integer out of range"); \
......@@ -18,11 +18,11 @@
#else
#define VALUE_TYPE int
#define VALUE_PARSE "i"
#define COPY_VALUE_TO_OBJECT(O, K) O=PyInt_FromLong(K)
#define COPY_VALUE_TO_OBJECT(O, K) O=INT_FROM_LONG(K)
#define COPY_VALUE_FROM_ARG(TARGET, ARG, STATUS) \
if (PyInt_Check(ARG)) { \
long vcopy = PyInt_AS_LONG(ARG); \
if (INT_CHECK(ARG)) { \
long vcopy = INT_AS_LONG(ARG); \
if ((int)vcopy != vcopy) { \
PyErr_SetString(PyExc_TypeError, "integer out of range"); \
(STATUS)=0; (TARGET)=0; \
......
......@@ -3,35 +3,34 @@
#define KEY_TYPE_IS_PYOBJECT
#include "Python.h"
#include "_compat.h"
static PyObject *object_;
static PyObject *object_; /* initialized in BTreeModuleTemplate init */
static int
check_argument_cmp(PyObject *arg)
{
/* printf("check cmp %p %p %p %p\n", */
/* arg->ob_type->tp_richcompare, */
/* ((PyTypeObject *)object_)->ob_type->tp_richcompare, */
/* arg->ob_type->tp_compare, */
/* ((PyTypeObject *)object_)->ob_type->tp_compare); */
/* printf("check cmp %p %p %p %p\n", */
/* arg->ob_type->tp_richcompare, */
/* ((PyTypeObject *)object_)->ob_type->tp_richcompare, */
/* arg->ob_type->tp_compare, */
/* ((PyTypeObject *)object_)->ob_type->tp_compare); */
if (arg->ob_type->tp_richcompare == NULL
&&
#if PY_MAJOR_VERSION==2 && PY_MINOR_VERSION < 6
arg->ob_type->tp_compare == NULL
#ifdef PY3K
if (Py_TYPE(arg)->tp_richcompare == Py_TYPE(object_)->tp_richcompare)
#else
arg->ob_type->tp_compare ==
((PyTypeObject *)object_)->ob_type->tp_compare
if (Py_TYPE(arg)->tp_richcompare == NULL
&& Py_TYPE(arg)->tp_compare == Py_TYPE(object_)->tp_compare)
#endif
)
{
PyErr_SetString(PyExc_TypeError, "Object has default comparison");
return 0;
PyErr_SetString(PyExc_TypeError, "Object has default comparison");
return 0;
}
return 1;
return 1;
}
#define TEST_KEY_SET_OR(V, KEY, TARGET) if ( ( (V) = PyObject_Compare((KEY),(TARGET)) ), PyErr_Occurred() )
#define TEST_KEY_SET_OR(V, KEY, TARGET) \
if ( ( (V) = COMPARE((KEY),(TARGET)) ), PyErr_Occurred() )
#define INCREF_KEY(k) Py_INCREF(k)
#define DECREF_KEY(KEY) Py_DECREF(KEY)
#define COPY_KEY(KEY, E) KEY=(E)
......
#define VALUEMACROS_H "$Id$\n"
#define VALUE_TYPE PyObject *
#define VALUE_TYPE_IS_PYOBJECT
#define TEST_VALUE(VALUE, TARGET) PyObject_Compare((VALUE),(TARGET))
#define TEST_VALUE(VALUE, TARGET) COMPARE((VALUE),(TARGET))
#define DECLARE_VALUE(NAME) VALUE_TYPE NAME
#define INCREF_VALUE(k) Py_INCREF(k)
#define DECREF_VALUE(k) Py_DECREF(k)
......
/* Backport type definitions from Python 2.5's object.h */
#ifndef BTREE_PY24COMPAT_H
#define BTREE_PY24COMPAT_H
#if PY_VERSION_HEX < 0x02050000
typedef Py_ssize_t (*lenfunc)(PyObject *);
typedef PyObject *(*ssizeargfunc)(PyObject *, Py_ssize_t);
typedef PyObject *(*ssizessizeargfunc)(PyObject *, Py_ssize_t, Py_ssize_t);
typedef int(*ssizeobjargproc)(PyObject *, Py_ssize_t, PyObject *);
typedef int(*ssizessizeobjargproc)(PyObject *, Py_ssize_t, Py_ssize_t, PyObject *);
#endif /* PY_VERSION_HEX */
#endif /* BTREE_PY24COMPAT_H */
......@@ -22,6 +22,16 @@ def _skip_wo_ZODB(test_method): #pragma NO COVER
else:
return test_method
def _skip_under_Py3k(test_method): #pragma NO COVER
try:
unicode
except NameError: # skip this test
def _dummy(*args):
pass
return _dummy
else:
return test_method
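A brief sketch of applying the decorator above: under Python 3 the decorated test body is replaced with a no-op, so the test is effectively skipped; _Example is a throwaway class:

class _Example(object):
    @_skip_under_Py3k
    def test_needs_unicode_builtin(self):
        print('runs only where the unicode builtin exists (Python 2)')

_Example().test_needs_unicode_builtin()   # silently does nothing on Py3k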
class Base(object):
# Tests common to all types: sets, buckets, and BTrees
......@@ -75,7 +85,7 @@ class Base(object):
def testSetstateArgumentChecking(self):
try:
self._makeOne().__setstate__(('',))
except TypeError, v:
except TypeError as v:
self.assertEqual(str(v), 'tuple required for first state element')
else:
raise AssertionError("Expected exception")
......@@ -171,17 +181,17 @@ class Base(object):
# Modifying a thing
remove(100)
self.assert_(t in read)
self.assertTrue(t in read)
del read[:]
add(100)
self.assert_(t in read)
self.assertTrue(t in read)
del read[:]
transaction.abort()
conn.cacheMinimize()
list(t)
self.assert_(100 in t)
self.assert_(not read)
self.assertTrue(100 in t)
self.assertTrue(not read)
class MappingBase(Base):
......@@ -189,7 +199,8 @@ class MappingBase(Base):
def _populate(self, t, l):
# Make some data
for i in range(l): t[i]=i
for i in range(l):
t[i]=i
def testRepr(self):
# test the repr because buckets have a complex repr implementation
......@@ -203,7 +214,7 @@ class MappingBase(Base):
# But since the test is also run for btrees, skip the length
# check if the repr starts with '<'
if not r.startswith('<'):
self.assert_(len(r) > 10000)
self.assertTrue(len(r) > 10000)
def testGetItemFails(self):
self.assertRaises(KeyError, self._getitemfail)
......@@ -219,7 +230,7 @@ class MappingBase(Base):
t = self._makeOne()
t[1] = 1
a = t[1]
self.assertEqual(a , 1, `a`)
self.assertEqual(a , 1, repr(a))
def testReplaceWorks(self):
t = self._makeOne()
......@@ -232,7 +243,7 @@ class MappingBase(Base):
import random
t = self._makeOne()
added = {}
r = range(1000)
r = list(range(1000))
for x in r:
k = random.choice(r)
t[k] = x
......@@ -241,12 +252,14 @@ class MappingBase(Base):
self.assertEqual(len(t) , len(addl), len(t))
def testHasKeyWorks(self):
from .._compat import PY2
t = self._makeOne()
t[1] = 1
self.assert_(t.has_key(1))
self.assert_(1 in t)
self.assert_(0 not in t)
self.assert_(2 not in t)
if PY2:
self.assertTrue(t.has_key(1))
self.assertTrue(1 in t)
self.assertTrue(0 not in t)
self.assertTrue(2 not in t)
def testValuesWorks(self):
t = self._makeOne()
......@@ -267,20 +280,18 @@ class MappingBase(Base):
t[99-x] = x
for x in range(40):
lst = list(t.values(0+x,99-x))
lst.sort()
self.assertEqual(lst,range(0+x,99-x+1))
lst = sorted(t.values(0+x,99-x))
self.assertEqual(lst, list(range(0+x,99-x+1)))
lst = list(t.values(max=99-x, min=0+x))
lst.sort()
self.assertEqual(lst,range(0+x,99-x+1))
lst = sorted(t.values(max=99-x, min=0+x))
self.assertEqual(lst, list(range(0+x,99-x+1)))
def testValuesNegativeIndex(self):
t = self._makeOne()
L = [-3, 6, -11, 4]
for i in L:
t[i] = i
L.sort()
L = sorted(L)
vals = t.values()
for i in range(-1, -5, -1):
self.assertEqual(vals[i], L[i])
......@@ -299,10 +310,10 @@ class MappingBase(Base):
for x in range(40):
lst = t.keys(0+x,99-x)
self.assertEqual(list(lst), range(0+x, 99-x+1))
self.assertEqual(list(lst), list(range(0+x, 99-x+1)))
lst = t.keys(max=99-x, min=0+x)
self.assertEqual(list(lst), range(0+x, 99-x+1))
self.assertEqual(list(lst), list(range(0+x, 99-x+1)))
self.assertEqual(len(v), 100)
......@@ -311,7 +322,7 @@ class MappingBase(Base):
L = [-3, 6, -11, 4]
for i in L:
t[i] = i
L.sort()
L = sorted(L)
keys = t.keys()
for i in range(-1, -5, -1):
self.assertEqual(keys[i], L[i])
......@@ -335,17 +346,17 @@ class MappingBase(Base):
i += 1
items = list(t.items(min=12, max=20))
self.assertEqual(items, zip(range(12, 21), range(24, 43, 2)))
self.assertEqual(items, list(zip(range(12, 21), range(24, 43, 2))))
items = list(t.iteritems(min=12, max=20))
self.assertEqual(items, zip(range(12, 21), range(24, 43, 2)))
self.assertEqual(items, list(zip(range(12, 21), range(24, 43, 2))))
def testItemsNegativeIndex(self):
t = self._makeOne()
L = [-3, 6, -11, 4]
for i in L:
t[i] = i
L.sort()
L = sorted(L)
items = t.items()
for i in range(-1, -5, -1):
self.assertEqual(items[i], (L[i], L[i]))
......@@ -378,14 +389,14 @@ class MappingBase(Base):
try:
t.maxKey(t.minKey() - 1)
except ValueError, err:
except ValueError as err:
self.assertEqual(str(err), "no key satisfies the conditions")
else:
self.fail("expected ValueError")
try:
t.minKey(t.maxKey() + 1)
except ValueError, err:
except ValueError as err:
self.assertEqual(str(err), "no key satisfies the conditions")
else:
self.fail("expected ValueError")
......@@ -393,7 +404,7 @@ class MappingBase(Base):
def testClear(self):
import random
t = self._makeOne()
r = range(100)
r = list(range(100))
for x in r:
rnd = random.choice(r)
t[rnd] = 0
......@@ -411,8 +422,7 @@ class MappingBase(Base):
d[k]=i
l.append((k, i))
items=d.items()
items.sort()
items= sorted(d.items())
t.update(d)
self.assertEqual(list(t.items()), items)
......@@ -447,7 +457,7 @@ class MappingBase(Base):
# For IITreeSets, this one was returning 31 for len(keys), and
# list(keys) produced a list with 100 elements.
t.clear()
t.update(zip(range(300), range(300)))
t.update(list(zip(range(300), range(300))))
keys = t.keys(200, 50)
self.assertEqual(len(keys), 0)
self.assertEqual(list(keys), [])
......@@ -464,6 +474,7 @@ class MappingBase(Base):
# This tests fixes to several bugs in this area, starting with
# http://collector.zope.org/Zope/419,
# "BTreeItems slice contains 1 too many elements".
from .._compat import xrange
t = self._makeOne()
for n in range(10):
t.clear()
......@@ -531,12 +542,12 @@ class MappingBase(Base):
t[i] = 1
tslice = t.items()[20:80]
self.assertEqual(len(tslice), 60)
self.assertEqual(list(tslice), zip(range(20, 80), [1]*60))
self.assertEqual(list(tslice), list(zip(range(20, 80), [1]*60)))
def testIterators(self):
t = self._makeOne()
for keys in [], [-2], [1, 4], range(-170, 2000, 6):
for keys in [], [-2], [1, 4], list(range(-170, 2000, 6)):
t.clear()
for k in keys:
t[k] = -3 * k
......@@ -549,11 +560,11 @@ class MappingBase(Base):
self.assertEqual(x, keys)
it = iter(t)
self.assert_(it is iter(it))
self.assertTrue(it is iter(it))
x = []
try:
while 1:
x.append(it.next())
x.append(next(it))
except StopIteration:
pass
self.assertEqual(x, keys)
......@@ -565,14 +576,14 @@ class MappingBase(Base):
def testRangedIterators(self):
t = self._makeOne()
for keys in [], [-2], [1, 4], range(-170, 2000, 13):
for keys in [], [-2], [1, 4], list(range(-170, 2000, 13)):
t.clear()
values = []
for k in keys:
value = -3 * k
t[k] = value
values.append(value)
items = zip(keys, values)
items = list(zip(keys, values))
self.assertEqual(list(t.iterkeys()), keys)
self.assertEqual(list(t.itervalues()), values)
......@@ -593,7 +604,7 @@ class MappingBase(Base):
got = t.itervalues(lo)
self.assertEqual(goodvalues, list(got))
gooditems = zip(goodkeys, goodvalues)
gooditems = list(zip(goodkeys, goodvalues))
got = t.iteritems(lo)
self.assertEqual(gooditems, list(got))
......@@ -607,7 +618,7 @@ class MappingBase(Base):
got = t.itervalues(lo, max=hi)
self.assertEqual(goodvalues, list(got))
gooditems = zip(goodkeys, goodvalues)
gooditems = list(zip(goodkeys, goodvalues))
got = t.iteritems(max=hi, min=lo)
self.assertEqual(gooditems, list(got))
......@@ -684,7 +695,7 @@ class MappingBase(Base):
self.assertEqual(t.setdefault(1, 2), 2)
# That should also have associated 1 with 2 in the tree.
self.assert_(1 in t)
self.assertTrue(1 in t)
self.assertEqual(t[1], 2)
# And trying to change it again should have no effect.
self.assertEqual(t.setdefault(1, 666), 2)
......@@ -820,14 +831,13 @@ class BTreeTests(MappingBase):
import random
t = self._makeOne()
added = {}
r = range(100)
r = list(range(100))
for x in r:
k = random.choice(r)
if not added.has_key(k):
if k not in added:
t[k] = x
added[k] = 1
addl = added.keys()
addl.sort()
addl = sorted(added.keys())
diff = lsubtract(list(t.keys()), addl)
self.assertEqual(diff , [], (diff, addl, list(t.keys())))
self._checkIt(t)
......@@ -836,13 +846,12 @@ class BTreeTests(MappingBase):
import random
t = self._makeOne()
added = {}
r = range(100)
r = list(range(100))
for x in r:
k = random.choice(r)
t[k] = x
added[k] = 1
addl = added.keys()
addl.sort()
addl = sorted(added.keys())
diff = lsubtract(t.keys(), addl)
self.assertEqual(diff , [], diff)
self._checkIt(t)
......@@ -850,7 +859,7 @@ class BTreeTests(MappingBase):
def testRandomDeletes(self):
import random
t = self._makeOne()
r = range(1000)
r = list(range(1000))
added = []
for x in r:
k = random.choice(r)
......@@ -859,15 +868,15 @@ class BTreeTests(MappingBase):
deleted = []
for x in r:
k = random.choice(r)
if t.has_key(k):
self.assert_(k in t)
if k in t:
self.assertTrue(k in t)
del t[k]
deleted.append(k)
if t.has_key(k):
if k in t:
self.fail( "had problems deleting %s" % k )
badones = []
for x in deleted:
if t.has_key(x):
if x in t:
badones.append(x)
self.assertEqual(badones , [], (badones, added, deleted))
self._checkIt(t)
......@@ -875,7 +884,7 @@ class BTreeTests(MappingBase):
def testTargetedDeletes(self):
import random
t = self._makeOne()
r = range(1000)
r = list(range(1000))
for x in r:
k = random.choice(r)
t[k] = x
......@@ -889,7 +898,7 @@ class BTreeTests(MappingBase):
def testPathologicalRightBranching(self):
t = self._makeOne()
r = range(1000)
r = list(range(1000))
for x in r:
t[x] = 1
self.assertEqual(realseq(t.keys()) , r, realseq(t.keys()))
......@@ -900,9 +909,8 @@ class BTreeTests(MappingBase):
def testPathologicalLeftBranching(self):
t = self._makeOne()
r = range(1000)
revr = r[:]
revr.reverse()
r = list(range(1000))
revr = list(reversed(r[:]))
for x in revr:
t[x] = 1
self.assertEqual(realseq(t.keys()) , r, realseq(t.keys()))
......@@ -955,9 +963,10 @@ class BTreeTests(MappingBase):
for x in add_order:
t[x] = 1
for x in delete_order:
try: del t[x]
try:
del t[x]
except KeyError:
if t.has_key(x):
if x in t:
self.assertEqual(1,2,"failed to delete %s" % x)
self._checkIt(t)
......@@ -989,14 +998,14 @@ class BTreeTests(MappingBase):
for i in range(200):
t[i] = i
items, dummy = t.__getstate__()
self.assert_(len(items) > 2) # at least two buckets and a key
self.assertTrue(len(items) > 2) # at least two buckets and a key
# All values in the first bucket are < firstkey. All in the
# second bucket are >= firstkey, and firstkey is the first key in
# the second bucket.
firstkey = items[1]
therange = t.keys(-1, firstkey)
self.assertEqual(len(therange), firstkey + 1)
self.assertEqual(list(therange), range(firstkey + 1))
self.assertEqual(list(therange), list(range(firstkey + 1)))
# Now for the tricky part. If we delete firstkey, the second bucket
# loses its smallest key, but firstkey remains in the BTree node.
# If we then do a high-end range search on firstkey, the BTree node
......@@ -1008,7 +1017,7 @@ class BTreeTests(MappingBase):
del t[firstkey]
therange = t.keys(min=-1, max=firstkey)
self.assertEqual(len(therange), firstkey)
self.assertEqual(list(therange), range(firstkey))
self.assertEqual(list(therange), list(range(firstkey)))
self._checkIt(t)
def testInsertMethod(self):
......@@ -1037,11 +1046,11 @@ class BTreeTests(MappingBase):
for dummy in range(20):
try:
del t[k[0]]
except RuntimeError, detail:
except RuntimeError as detail:
self.assertEqual(str(detail), "the bucket being iterated "
"changed size")
break
except KeyError, v:
except KeyError as v:
# The Python implementation behaves very differently and
# gives a key error in this situation. It can't mess up
# memory and can't readily detect changes to underlying buckets
......@@ -1069,22 +1078,28 @@ class NormalSetTests(Base):
self.assertEqual(t.add(5) , 0)
def testInsert(self):
from .._compat import PY2
t = self._makeOne()
t.insert(1)
self.assert_(t.has_key(1))
self.assert_(1 in t)
self.assert_(2 not in t)
if PY2:
self.assertTrue(t.has_key(1))
self.assertTrue(1 in t)
self.assertTrue(2 not in t)
def testBigInsert(self):
from .._compat import PY2
from .._compat import xrange
t = self._makeOne()
r = xrange(10000)
for x in r:
t.insert(x)
for x in r:
self.assert_(t.has_key(x))
self.assert_(x in t)
if PY2:
self.assertTrue(t.has_key(x))
self.assertTrue(x in t)
def testRemoveSucceeds(self):
from .._compat import xrange
t = self._makeOne()
r = xrange(10000)
for x in r: t.insert(x)
......@@ -1097,11 +1112,14 @@ class NormalSetTests(Base):
self._makeOne().remove(1)
def testHasKeyFails(self):
from .._compat import PY2
t = self._makeOne()
self.assert_(not t.has_key(1))
self.assert_(1 not in t)
if PY2:
self.assertTrue(not t.has_key(1))
self.assertTrue(1 not in t)
def testKeys(self):
from .._compat import xrange
t = self._makeOne()
r = xrange(1000)
for x in r:
......@@ -1111,6 +1129,7 @@ class NormalSetTests(Base):
def testClear(self):
from .._compat import xrange
t = self._makeOne()
r = xrange(1000)
for x in r: t.insert(x)
......@@ -1134,21 +1153,21 @@ class NormalSetTests(Base):
self.assertEqual(t.minKey() , 1)
self.assertEqual(t.minKey(3) , 3)
self.assertEqual(t.minKey(9) , 10)
self.assert_(t.minKey() in t)
self.assert_(t.minKey()-1 not in t)
self.assert_(t.maxKey() in t)
self.assert_(t.maxKey()+1 not in t)
self.assertTrue(t.minKey() in t)
self.assertTrue(t.minKey()-1 not in t)
self.assertTrue(t.maxKey() in t)
self.assertTrue(t.maxKey()+1 not in t)
try:
t.maxKey(t.minKey() - 1)
except ValueError, err:
except ValueError as err:
self.assertEqual(str(err), "no key satisfies the conditions")
else:
self.fail("expected ValueError")
try:
t.minKey(t.maxKey() + 1)
except ValueError, err:
except ValueError as err:
self.assertEqual(str(err), "no key satisfies the conditions")
else:
self.fail("expected ValueError")
......@@ -1163,8 +1182,7 @@ class NormalSetTests(Base):
d[k]=i
l.append(k)
items = d.keys()
items.sort()
items = sorted(d.keys())
t.update(l)
self.assertEqual(list(t.keys()), items)
......@@ -1203,28 +1221,28 @@ class NormalSetTests(Base):
self.assertEqual(len(t), n)
kslice = t.keys()
self.assertEqual(len(kslice), n)
self.assertEqual(len(list(kslice)), n)
# Test whole-structure slices.
x = kslice[:]
self.assertEqual(list(x), keys[:])
self.assertEqual(list(x), list(keys[:]))
for lo in range(-2*n, 2*n+1):
# Test one-sided slices.
x = kslice[:lo]
self.assertEqual(list(x), keys[:lo])
self.assertEqual(list(x), list(keys[:lo]))
x = kslice[lo:]
self.assertEqual(list(x), keys[lo:])
self.assertEqual(list(x), list(keys[lo:]))
for hi in range(-2*n, 2*n+1):
# Test two-sided slices.
x = kslice[lo:hi]
self.assertEqual(list(x), keys[lo:hi])
self.assertEqual(list(x), list(keys[lo:hi]))
def testIterator(self):
t = self._makeOne()
for keys in [], [-2], [1, 4], range(-170, 2000, 6):
for keys in [], [-2], [1, 4], list(range(-170, 2000, 6)):
t.clear()
t.update(keys)
......@@ -1236,11 +1254,11 @@ class NormalSetTests(Base):
self.assertEqual(x, keys)
it = iter(t)
self.assert_(it is iter(it))
self.assertTrue(it is iter(it))
x = []
try:
while 1:
x.append(it.next())
x.append(next(it))
except StopIteration:
pass
self.assertEqual(x, keys)
......@@ -1248,12 +1266,14 @@ class NormalSetTests(Base):
class ExtendedSetTests(NormalSetTests):
def testLen(self):
from .._compat import xrange
t = self._makeOne()
r = xrange(10000)
for x in r: t.insert(x)
self.assertEqual(len(t) , 10000, len(t))
def testGetItem(self):
from .._compat import xrange
t = self._makeOne()
r = xrange(10000)
for x in r: t.insert(x)
......@@ -1300,10 +1320,10 @@ class InternalKeysMappingTest(object):
key = data[1]
del tree[key]
data = tree.__getstate__()[0]
self.assert_(data[1] != key)
self.assertTrue(data[1] != key)
# The tree should have changed:
self.assert_(tree._p_changed)
self.assertTrue(tree._p_changed)
# Grow the btree until we have multiple levels
while 1:
......@@ -1318,7 +1338,7 @@ class InternalKeysMappingTest(object):
key = data[1]
del tree[key]
data = tree.__getstate__()[0]
self.assert_(data[1] != key)
self.assertTrue(data[1] != key)
transaction.abort()
db.close()
......@@ -1340,7 +1360,7 @@ class ModuleTest(object):
for name in ('Bucket', 'BTree', 'Set', 'TreeSet'):
klass = getattr(self._getModule(), name)
self.assertEqual(klass.__module__, self._getModule().__name__)
self.assert_(klass is getattr(self._getModule(),
self.assertTrue(klass is getattr(self._getModule(),
self.prefix + name))
def testModuleProvides(self):
......@@ -1350,12 +1370,12 @@ class ModuleTest(object):
def testFamily(self):
import BTrees
if self.prefix == 'OO':
self.assert_(
self.assertTrue(
getattr(self._getModule(), 'family', self) is self)
elif 'L' in self.prefix:
self.assert_(self._getModule().family is BTrees.family64)
self.assertTrue(self._getModule().family is BTrees.family64)
elif 'I' in self.prefix:
self.assert_(self._getModule().family is BTrees.family32)
self.assertTrue(self._getModule().family is BTrees.family32)
class TypeTest(object):
......@@ -1423,6 +1443,12 @@ class TestLongIntSupport:
class TestLongIntKeys(TestLongIntSupport):
def _makeLong(self, v):
try:
return long(v)
except NameError: #pragma NO COVER Py3k
return int(v)
def testLongIntKeysWork(self):
from BTrees.IIBTree import using64bits
if not using64bits:
......@@ -1432,10 +1458,11 @@ class TestLongIntKeys(TestLongIntSupport):
assert o1 != o2
# Test some small key values first:
t[0L] = o1
zero_long = self._makeLong(0)
t[zero_long] = o1
self.assertEqual(t[0], o1)
t[0] = o2
self.assertEqual(t[0L], o2)
self.assertEqual(t[zero_long], o2)
self.assertEqual(list(t.keys()), [0])
# Test some large key values too:
......@@ -1469,8 +1496,7 @@ class TestLongIntValues(TestLongIntSupport):
if not using64bits:
return
t = self._makeOne()
keys = list(self.getTwoKeys())
keys.sort()
keys = sorted(self.getTwoKeys())
k1, k2 = keys
assert k1 != k2
......@@ -1500,7 +1526,7 @@ class TestLongIntValues(TestLongIntSupport):
# that builds an object of that type given only a list of keys.
def makeBuilder(mapbuilder):
def result(keys=[], mapbuilder=mapbuilder):
return mapbuilder(zip(keys, keys))
return mapbuilder(list(zip(keys, keys)))
return result
# Subclasses have to set up:
......@@ -1521,8 +1547,7 @@ class SetResult(object):
for e in y:
if e not in result:
result.append(e)
result.sort()
return result
return sorted(result)
def _intersection(self, x, y):
result = []
......@@ -1544,39 +1569,39 @@ class SetResult(object):
def testNone(self):
for op in self.union, self.intersection, self.difference:
C = op(None, None)
self.assert_(C is None)
self.assertTrue(C is None)
for op in self.union, self.intersection, self.difference:
for A in self.As:
C = op(A, None)
self.assert_(C is A)
self.assertTrue(C is A)
C = op(None, A)
if op == self.difference:
self.assert_(C is None)
self.assertTrue(C is None)
else:
self.assert_(C is A)
self.assertTrue(C is A)
def testEmptyUnion(self):
for A in self.As:
for E in self.emptys:
C = self.union(A, E)
self.assert_(not hasattr(C, "values"))
self.assertTrue(not hasattr(C, "values"))
self.assertEqual(list(C), self.Akeys)
C = self.union(E, A)
self.assert_(not hasattr(C, "values"))
self.assertTrue(not hasattr(C, "values"))
self.assertEqual(list(C), self.Akeys)
def testEmptyIntersection(self):
for A in self.As:
for E in self.emptys:
C = self.intersection(A, E)
self.assert_(not hasattr(C, "values"))
self.assertTrue(not hasattr(C, "values"))
self.assertEqual(list(C), [])
C = self.intersection(E, A)
self.assert_(not hasattr(C, "values"))
self.assertTrue(not hasattr(C, "values"))
self.assertEqual(list(C), [])
def testEmptyDifference(self):
......@@ -1599,7 +1624,7 @@ class SetResult(object):
for A in inputs:
for B in inputs:
C = self.union(A, B)
self.assert_(not hasattr(C, "values"))
self.assertTrue(not hasattr(C, "values"))
self.assertEqual(list(C), self._union(A, B))
def testIntersection(self):
......@@ -1607,7 +1632,7 @@ class SetResult(object):
for A in inputs:
for B in inputs:
C = self.intersection(A, B)
self.assert_(not hasattr(C, "values"))
self.assertTrue(not hasattr(C, "values"))
self.assertEqual(list(C), self._intersection(A, B))
def testDifference(self):
......@@ -1673,39 +1698,39 @@ class Weighted(object):
def testBothNone(self):
for op in self.weightedUnion(), self.weightedIntersection():
w, C = op(None, None)
self.assert_(C is None)
self.assertTrue(C is None)
self.assertEqual(w, 0)
w, C = op(None, None, 42, 666)
self.assert_(C is None)
self.assertTrue(C is None)
self.assertEqual(w, 0)
def testLeftNone(self):
for op in self.weightedUnion(), self.weightedIntersection():
for A in self.As + self.emptys:
w, C = op(None, A)
self.assert_(C is A)
self.assertTrue(C is A)
self.assertEqual(w, 1)
w, C = op(None, A, 42, 666)
self.assert_(C is A)
self.assertTrue(C is A)
self.assertEqual(w, 666)
def testRightNone(self):
for op in self.weightedUnion(), self.weightedIntersection():
for A in self.As + self.emptys:
w, C = op(A, None)
self.assert_(C is A)
self.assertTrue(C is A)
self.assertEqual(w, 1)
w, C = op(A, None, 42, 666)
self.assert_(C is A)
self.assertTrue(C is A)
self.assertEqual(w, 42)
# If obj is a set, return a bucket with values all 1; else return obj.
def _normalize(self, obj):
if isaset(obj):
obj = self.mkbucket(zip(obj, [1] * len(obj)))
obj = self.mkbucket(list(zip(obj, [1] * len(obj))))
return obj
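In other words, a plain set comes back as a bucket whose members all carry weight 1, while a mapping passes through unchanged. A minimal sketch of the same idea, using OOSet/OOBucket as stand-ins for the test harness's isaset and mkbucket helpers:

from BTrees.OOBTree import OOBucket, OOSet

def normalize(obj):
    # Sets carry no values; give every member the default weight 1.
    if isinstance(obj, OOSet):
        return OOBucket([(k, 1) for k in obj])
    return obj

assert dict(normalize(OOSet(['a', 'b']))) == {'a': 1, 'b': 1}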
# Python simulation of weightedUnion.
......@@ -1796,12 +1821,13 @@ class MultiUnion(object):
self.assertEqual(len(self.multiunion([])), 0)
def testOne(self):
for sequence in [3], range(20), range(-10, 0, 2) + range(1, 10, 2):
for sequence in ([3],
list(range(20)),
list(range(-10, 0, 2)) + list(range(1, 10, 2)),
):
seq1 = sequence[:]
seq2 = sequence[:]
seq2.reverse()
seqsorted = sequence[:]
seqsorted.sort()
seq2 = list(reversed(sequence[:]))
seqsorted = sorted(sequence[:])
for seq in seq1, seq2, seqsorted:
for builder in self.mkset, self.mktreeset:
input = builder(seq)
......@@ -1817,12 +1843,12 @@ class MultiUnion(object):
def testBigInput(self):
N = 100000
input = self.mkset(range(N))
input = self.mkset(list(range(N)))
output = self.multiunion([input] * 10)
self.assertEqual(len(output), N)
self.assertEqual(output.minKey(), 0)
self.assertEqual(output.maxKey(), N-1)
self.assertEqual(list(output), range(N))
self.assertEqual(list(output), list(range(N)))
def testLotsOfLittleOnes(self):
from random import shuffle
......@@ -1836,7 +1862,7 @@ class MultiUnion(object):
shuffle(inputs)
output = self.multiunion(inputs)
self.assertEqual(len(output), N*4)
self.assertEqual(list(output), range(-N, 3*N))
self.assertEqual(list(output), list(range(-N, 3*N)))
def testFunkyKeyIteration(self):
# The internal set iteration protocol allows "iterating over" a
......@@ -1846,11 +1872,11 @@ class MultiUnion(object):
slow = mkset()
for i in range(N):
slow = union(slow, mkset([i]))
fast = self.multiunion(range(N)) # acts like N distinct singleton sets
fast = self.multiunion(list(range(N))) # like N distinct singleton sets
self.assertEqual(len(slow), N)
self.assertEqual(len(fast), N)
self.assertEqual(list(slow), list(fast))
self.assertEqual(list(fast), range(N))
self.assertEqual(list(fast), list(range(N)))
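The point of the test above is that multiunion accepts bare keys in its input sequence and treats each one as a singleton set. A short sketch of that behaviour, assuming the IIBTree flavour that the surrounding tests also exercise:

from BTrees.IIBTree import IISet, multiunion

# Inputs may mix real sets with bare integers acting as one-element sets.
result = multiunion([IISet([5, 6]), 3, 7])
assert list(result) == [3, 5, 6, 7]   # merged, de-duplicated, sorted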
class ConflictTestBase(object):
......@@ -1890,7 +1916,7 @@ def _test_merge(o1, o2, o3, expect, message='failed to merge', should_fail=0):
if should_fail:
try:
merged = o1._p_resolveConflict(s1, s2, s3)
except BTreesConflictError, err:
except BTreesConflictError as err:
pass
else:
assert 0, message
......@@ -2143,9 +2169,8 @@ class SetConflictTestBase(ConflictTestBase):
def lsubtract(l1, l2):
l1 = list(l1)
l2 = list(l2)
l = filter(lambda x, l1=l1: x not in l1, l2)
l = l + filter(lambda x, l2=l2: x not in l2, l1)
return l
return (list(filter(lambda x, l1=l1: x not in l1, l2)) +
list(filter(lambda x, l2=l2: x not in l2, l1)))
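lsubtract thus yields the symmetric difference of the two sequences: elements of l2 missing from l1, followed by elements of l1 missing from l2. A quick worked example:

# [4] comes from l2 (absent from l1); [1] comes from l1 (absent from l2).
assert lsubtract([1, 2, 3], [2, 3, 4]) == [4, 1]
assert lsubtract([1, 2], [1, 2]) == []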
def realseq(itemsob):
return [x for x in itemsob]
......
......@@ -121,7 +121,7 @@ class DegenerateBTree(unittest.TestCase):
self.assertEqual(t.has_key(7), 5)
self.assertEqual(t.has_key(11), 5)
for i in 0, 2, 4, 6, 8, 9, 10, 12:
self.assert_(i not in t)
self.assertTrue(i not in t)
def _checkRanges(self, tree, keys):
self.assertEqual(len(tree), len(keys))
......@@ -129,7 +129,7 @@ class DegenerateBTree(unittest.TestCase):
sorted_keys.sort()
self.assertEqual(list(tree.keys()), sorted_keys)
for k in keys:
self.assert_(k in tree)
self.assertTrue(k in tree)
if keys:
lokey = sorted_keys[0]
hikey = sorted_keys[-1]
......@@ -206,6 +206,24 @@ class ToBeDeleted(object):
def __cmp__(self, other):
return cmp(self.id, other.id)
def __le__(self, other):
return self.id <= other.id
def __lt__(self, other):
return self.id < other.id
def __eq__(self, other):
return self.id == other.id
def __ne__(self, other):
return self.id != other.id
def __gt__(self, other):
return self.id > other.id
def __ge__(self, other):
return self.id >= other.id
def __hash__(self):
return hash(self.id)
......@@ -236,6 +254,8 @@ class BugFixes(unittest.TestCase):
import gc
import random
from BTrees.OOBTree import OOBTree
from .._compat import _u
from .._compat import xrange
t = OOBTree()
......@@ -257,12 +277,12 @@ class BugFixes(unittest.TestCase):
t[id] = ToBeDeleted(id)
else:
#del
id = trandom.choice(ids.keys())
id = trandom.choice(list(ids.keys()))
del t[id]
del ids[id]
ids = ids.keys()
trandom.shuffle(ids)
trandom.shuffle(list(ids))
for id in ids:
del t[id]
ids = None
......@@ -287,15 +307,15 @@ class BugFixes(unittest.TestCase):
id = trandom.randint(0,1000000)
ids[id] = 1
t[id] = (id, ToBeDeleted(id), u'somename')
t[id] = (id, ToBeDeleted(id), _u('somename'))
else:
#del
id = trandom.choice(ids.keys())
id = trandom.choice(list(ids.keys()))
del t[id]
del ids[id]
ids = ids.keys()
trandom.shuffle(ids)
trandom.shuffle(list(ids))
for id in ids:
del t[id]
ids = None
......@@ -325,12 +345,12 @@ class BugFixes(unittest.TestCase):
t[id] = 1
else:
#del
id = trandom.choice(ids.keys())
id = trandom.choice(list(ids.keys()))
del ids[id]
del t[id]
ids = ids.keys()
trandom.shuffle(ids)
trandom.shuffle(list(ids))
for id in ids:
del t[id]
#release all refs
......@@ -354,18 +374,18 @@ class BugFixes(unittest.TestCase):
id = None
while id is None or id in ids:
id = trandom.randint(0,1000000)
id = (id, ToBeDeleted(id), u'somename')
id = (id, ToBeDeleted(id), _u('somename'))
ids[id] = 1
t[id] = 1
else:
#del
id = trandom.choice(ids.keys())
id = trandom.choice(list(ids.keys()))
del ids[id]
del t[id]
ids = ids.keys()
trandom.shuffle(ids)
trandom.shuffle(list(ids))
for id in ids:
del t[id]
#release all refs
......@@ -387,7 +407,7 @@ class DoesntLikeBeingCompared:
def __cmp__(self,other):
raise ValueError('incomparable')
__lt__ = __le__ = __eq__ = __ne__ = __ge__ = __gt__ = __cmp__
class TestCmpError(unittest.TestCase):
......@@ -397,7 +417,7 @@ class TestCmpError(unittest.TestCase):
t['hello world'] = None
try:
t[DoesntLikeBeingCompared()] = None
except ValueError,e:
except ValueError as e:
self.assertEqual(str(e), 'incomparable')
else:
self.fail('incomparable objects should not be allowed into '
......@@ -410,22 +430,22 @@ class FamilyTest(unittest.TestCase):
import BTrees
from BTrees.IOBTree import IOTreeSet
verifyObject(BTrees.Interfaces.IBTreeFamily, BTrees.family32)
self.assertEquals(
self.assertEqual(
BTrees.family32.IO, BTrees.IOBTree)
self.assertEquals(
self.assertEqual(
BTrees.family32.OI, BTrees.OIBTree)
self.assertEquals(
self.assertEqual(
BTrees.family32.II, BTrees.IIBTree)
self.assertEquals(
self.assertEqual(
BTrees.family32.IF, BTrees.IFBTree)
self.assertEquals(
self.assertEqual(
BTrees.family32.OO, BTrees.OOBTree)
s = IOTreeSet()
s.insert(BTrees.family32.maxint)
self.assert_(BTrees.family32.maxint in s)
self.assertTrue(BTrees.family32.maxint in s)
s = IOTreeSet()
s.insert(BTrees.family32.minint)
self.assert_(BTrees.family32.minint in s)
self.assertTrue(BTrees.family32.minint in s)
s = IOTreeSet()
# this next bit illustrates an, um, "interesting feature". If
# the characteristics change to match the 64 bit version, please
......@@ -440,22 +460,22 @@ class FamilyTest(unittest.TestCase):
import BTrees
from BTrees.LOBTree import LOTreeSet
verifyObject(BTrees.Interfaces.IBTreeFamily, BTrees.family64)
self.assertEquals(
self.assertEqual(
BTrees.family64.IO, BTrees.LOBTree)
self.assertEquals(
self.assertEqual(
BTrees.family64.OI, BTrees.OLBTree)
self.assertEquals(
self.assertEqual(
BTrees.family64.II, BTrees.LLBTree)
self.assertEquals(
self.assertEqual(
BTrees.family64.IF, BTrees.LFBTree)
self.assertEquals(
self.assertEqual(
BTrees.family64.OO, BTrees.OOBTree)
s = LOTreeSet()
s.insert(BTrees.family64.maxint)
self.assert_(BTrees.family64.maxint in s)
self.assertTrue(BTrees.family64.maxint in s)
s = LOTreeSet()
s.insert(BTrees.family64.minint)
self.assert_(BTrees.family64.minint in s)
self.assertTrue(BTrees.family64.minint in s)
s = LOTreeSet()
# XXX why oh why do we expect ValueError here, but TypeError in test32?
self.assertRaises(ValueError, s.insert, BTrees.family64.maxint + 1)
......@@ -468,35 +488,35 @@ class FamilyTest(unittest.TestCase):
# unpickling, whether from the same unpickler or different
# unpicklers.
import pickle
import StringIO
from .._compat import BytesIO
s = pickle.dumps((family, family))
(f1, f2) = pickle.loads(s)
self.failUnless(f1 is family)
self.failUnless(f2 is family)
self.assertTrue(f1 is family)
self.assertTrue(f2 is family)
# Using a single memo across multiple pickles:
sio = StringIO.StringIO()
sio = BytesIO()
p = pickle.Pickler(sio)
p.dump(family)
p.dump([family])
u = pickle.Unpickler(StringIO.StringIO(sio.getvalue()))
u = pickle.Unpickler(BytesIO(sio.getvalue()))
f1 = u.load()
f2, = u.load()
self.failUnless(f1 is family)
self.failUnless(f2 is family)
self.assertTrue(f1 is family)
self.assertTrue(f2 is family)
# Using separate memos for each pickle:
sio = StringIO.StringIO()
sio = BytesIO()
p = pickle.Pickler(sio)
p.dump(family)
p.clear_memo()
p.dump([family])
u = pickle.Unpickler(StringIO.StringIO(sio.getvalue()))
u = pickle.Unpickler(BytesIO(sio.getvalue()))
f1 = u.load()
f2, = u.load()
self.failUnless(f1 is family)
self.failUnless(f2 is family)
self.assertTrue(f1 is family)
self.assertTrue(f2 is family)
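What these assertions pin down is that BTrees.family32 and BTrees.family64 are module-level singletons: any round trip through pickle must hand back the identical object, never a copy. A minimal restatement of that invariant:

import pickle
import BTrees

# Families survive pickling by identity, not merely equality.
for family in (BTrees.family32, BTrees.family64):
    assert pickle.loads(pickle.dumps(family)) is family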
def test_suite():
......
......@@ -13,7 +13,7 @@
##############################################################################
import unittest
from BTrees.OOBTree import OOBTree
from .common import _skip_under_Py3k
# When an OOBTree contains unicode strings as keys,
# it is necessary that non-unicode strings used for access are
......@@ -26,33 +26,40 @@ class TestBTreesUnicode(unittest.TestCase):
""" test unicode"""
def setUp(self):
"""setup an OOBTree with some unicode strings"""
#setup an OOBTree with some unicode strings
from BTrees.OOBTree import OOBTree
from BTrees._compat import _bytes
from BTrees._compat import _u
self.s = unicode('dreit\xe4gigen', 'latin1')
self.s = _u(b'dreit\xe4gigen', 'latin1')
self.data = [('alien', 1),
('k\xf6nnten', 2),
('fox', 3),
('future', 4),
('quick', 5),
('zerst\xf6rt', 6),
(unicode('dreit\xe4gigen','latin1'), 7),
self.data = [(b'alien', 1),
(b'k\xf6nnten', 2),
(b'fox', 3),
(b'future', 4),
(b'quick', 5),
(b'zerst\xf6rt', 6),
(_u(b'dreit\xe4gigen','latin1'), 7),
]
self.tree = OOBTree()
for k, v in self.data:
if isinstance(k, str):
k = unicode(k, 'latin1')
if isinstance(k, _bytes):
k = _u(k, 'latin1')
self.tree[k] = v
@_skip_under_Py3k
def testAllKeys(self):
# check every item of the tree
from BTrees._compat import _u
from BTrees._compat import _bytes
for k, v in self.data:
if isinstance(k, str):
k = unicode(k, encoding)
self.assert_(self.tree.has_key(k))
if isinstance(k, _bytes):
k = _u(k, encoding)
self.assertTrue(k in self.tree)
self.assertEqual(self.tree[k], v)
@_skip_under_Py3k
def testUnicodeKeys(self):
# try to access unicode keys in tree
k, v = self.data[-1]
......@@ -60,10 +67,11 @@ class TestBTreesUnicode(unittest.TestCase):
self.assertEqual(self.tree[k], v)
self.assertEqual(self.tree[self.s], v)
@_skip_under_Py3k
def testAsciiKeys(self):
# try to access some "plain ASCII" keys in the tree
for k, v in self.data[0], self.data[2]:
self.assert_(isinstance(k, str))
self.assertTrue(isinstance(k, str))
self.assertEqual(self.tree[k], v)
def test_suite():
......
......@@ -136,7 +136,7 @@ class NastyConfictFunctionalTests(ConflictTestBase, unittest.TestCase):
numtoadd = 16
candidate = 60
while numtoadd:
if not b.has_key(candidate):
if candidate not in b:
b[candidate] = candidate
numtoadd -= 1
candidate += 1
......@@ -332,11 +332,11 @@ class NastyConfictFunctionalTests(ConflictTestBase, unittest.TestCase):
state1 = bucket.__getstate__()
state2 = bucket.__getstate__()
state3 = bucket.__getstate__()
self.assert_(state2 is not state1 and
state2 is not state3 and
state3 is not state1)
self.assert_(state2 == state1 and
state3 == state1)
self.assertTrue(state2 is not state1 and
state2 is not state3 and
state3 is not state1)
self.assertTrue(state2 == state1 and
state3 == state1)
self.assertRaises(BTreesConflictError, bucket._p_resolveConflict,
state1, state2, state3)
# When an empty BTree resolves conflicts, it computes the
......
......@@ -53,24 +53,42 @@ class LengthTestCase(unittest.TestCase):
length = self._makeOne()
self.assertEqual(length._p_resolveConflict(5, 7, 9), 11)
def test_change_w_positive_delta(self):
length = self._makeOne()
length.change(3)
self.assertEqual(length.value, 3)
def test_change_w_negative_delta(self):
length = self._makeOne()
length.change(-3)
self.assertEqual(length.value, -3)
def test_change_overflows_to_long(self):
import sys
length = self._makeOne(sys.maxint)
self.assertEqual(length(), sys.maxint)
self.assert_(type(length()) is int)
length.change(+1)
self.assertEqual(length(), sys.maxint + 1)
self.assert_(type(length()) is long)
try:
length = self._makeOne(sys.maxint)
except AttributeError: #pragma NO COVER Py3k
return
else: #pragma NO COVER Py2
self.assertEqual(length(), sys.maxint)
self.assertTrue(type(length()) is int)
length.change(+1)
self.assertEqual(length(), sys.maxint + 1)
self.assertTrue(type(length()) is long)
def test_change_underflows_to_long(self):
import sys
minint = (-sys.maxint) - 1
length = self._makeOne(minint)
self.assertEqual(length(), minint)
self.assert_(type(length()) is int)
length.change(-1)
self.assertEqual(length(), minint - 1)
self.assert_(type(length()) is long)
try:
minint = (-sys.maxint) - 1
except AttributeError: #pragma NO COVER Py3k
return
else: #pragma NO COVER Py2
length = self._makeOne(minint)
self.assertEqual(length(), minint)
self.assertTrue(type(length()) is int)
length.change(-1)
self.assertEqual(length(), minint - 1)
self.assertTrue(type(length()) is long)
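These tests drive the conflict-free counter (assumed here to be BTrees.Length.Length): it starts from an initial value, change(delta) adjusts it, and calling the instance reads the current count; on Python 2 the count rolls over from int to long rather than overflowing. A short usage sketch:

from BTrees.Length import Length

length = Length()        # starts at 0 by default
length.change(+3)
length.change(-1)
assert length() == 2     # calling the instance returns the value
assert length.value == 2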
def test___call___no_args(self):
length = self._makeOne(42)
......
......@@ -143,6 +143,7 @@ class _TestOIBTreesBase(TypeTest):
self._makeOne()[1] = None
def testEmptyFirstBucketReportedByGuido(self):
from .._compat import xrange
b = self._makeOne()
for i in xrange(29972): # reduce to 29971 and it works
b[i] = i
......
......@@ -112,6 +112,7 @@ class OOBTreeTest(BTreeTests, unittest.TestCase):
# used in a function that's used in lots of places.
# Otherwise, there are many permutations that would have to be
# checked.
from .._compat import PY2
t = self._makeOne()
class C(object):
......@@ -119,30 +120,31 @@ class OOBTreeTest(BTreeTests, unittest.TestCase):
self.assertRaises(TypeError, lambda : t.__setitem__(C(), 1))
class C(object):
def __cmp__(*args):
return 1
if PY2: # we only check for __cmp__ on Python2
c = C()
t[c] = 1
class With___cmp__(object):
def __cmp__(*args):
return 1
c = With___cmp__()
t[c] = 1
t.clear()
t.clear()
class C(object):
class With___lt__(object):
def __lt__(*args):
return 1
c = C()
c = With___lt__()
t[c] = 1
t.clear()
#class OOBTreePyTest(OOBTreeTest):
class OOBTreePyTest(OOBTreeTest):
#
# Right now, we can't match the C extension's test / prohibition of the
# default 'object' comparison semantics.
class OOBTreePyTest(BTreeTests, unittest.TestCase):
#class OOBTreePyTest(BTreeTests, unittest.TestCase):
def _makeOne(self):
from BTrees.OOBTree import OOBTreePy
......
......@@ -640,7 +640,7 @@ class BucketTests(unittest.TestCase):
bucket = self._makeOne()
for i, c in enumerate('abcdef'):
bucket[c] = i
self.assertEqual(bucket.values(), range(6))
self.assertEqual(bucket.values(), list(range(6)))
def test_values_filled_w_args(self):
bucket = self._makeOne()
......@@ -657,7 +657,7 @@ class BucketTests(unittest.TestCase):
bucket = self._makeOne()
for i, c in enumerate('abcdef'):
bucket[c] = i
self.assertEqual(list(bucket.itervalues()), range(6))
self.assertEqual(list(bucket.itervalues()), list(range(6)))
def test_itervalues_filled_w_args(self):
bucket = self._makeOne()
......@@ -1783,8 +1783,8 @@ class Test_Tree(unittest.TestCase):
def test__set_calls_readCurrent_on_jar(self):
tree = self._makeOne()
tree._p_oid = 'OID'
tree._p_serial = '01234567'
tree._p_oid = b'OID'
tree._p_serial = b'01234567'
tree._p_jar = jar = _Jar()
tree._set('a', 'b')
self.assertTrue(tree in jar._current)
......@@ -1839,8 +1839,8 @@ class Test_Tree(unittest.TestCase):
def test__del_calls_readCurrent_on_jar(self):
tree = self._makeOne({'a': 'b'})
tree._p_oid = 'OID'
tree._p_serial = '01234567'
tree._p_oid = b'OID'
tree._p_serial = b'01234567'
tree._p_jar = jar = _Jar()
tree._del('a')
self.assertTrue(tree in jar._current)
......@@ -1898,7 +1898,7 @@ class Test_Tree(unittest.TestCase):
bucket = tree._firstbucket
jar = _Jar()
bucket._p_jar = jar
bucket._p_oid = 'OID'
bucket._p_oid = b'OID'
self.assertEqual(tree.__getstate__(), ((bucket,), bucket))
def test___getstate___multiple_buckets(self):
......@@ -2273,7 +2273,7 @@ class TreeTests(unittest.TestCase):
def test_values_filled_no_args(self):
ITEMS = [(y, x) for x, y in enumerate('abcdefghijklmnopqrstuvwxyz')]
tree = self._makeOne(ITEMS)
self.assertEqual(list(tree.values()), range(26))
self.assertEqual(list(tree.values()), list(range(26)))
def test_values_filled_w_args(self):
ITEMS = [(y, x) for x, y in enumerate('abcdefghijklmnopqrstuvwxyz')]
......@@ -2289,7 +2289,7 @@ class TreeTests(unittest.TestCase):
def test_itervalues_filled_no_args(self):
ITEMS = [(y, x) for x, y in enumerate('abcdefghijklmnopqrstuvwxyz')]
tree = self._makeOne(ITEMS)
self.assertEqual(list(tree.itervalues()), range(26))
self.assertEqual(list(tree.itervalues()), list(range(26)))
def test_itervalues_filled_w_args(self):
ITEMS = [(y, x) for x, y in enumerate('abcdefghijklmnopqrstuvwxyz')]
......@@ -2634,6 +2634,28 @@ class Test_weightedUnion(unittest.TestCase, _SetObBase):
rhs = self._makeSet('a', 'b', 'c')
self.assertRaises(TypeError, self._callFUT, lhs.__class__, lhs, rhs)
def test_lhs_mapping_wo_MERGE_DEFAULT_rhs_set(self):
class _MappingWoDefault(dict):
def MERGE(self, v1, w1, v2, w2):
return (v1 * w1) + (v2 * w2)
def MERGE_WEIGHT(self, v, w):
return v
lhs = _MappingWoDefault({'a': 13, 'b': 12, 'c': 11})
lhs._mapping_type = _MappingWoDefault
rhs = self._makeSet('a', 'b', 'c')
self.assertRaises(TypeError, self._callFUT, lhs.__class__, lhs, rhs)
def test_lhs_mapping_wo_MERGE_rhs_mapping(self):
class _MappingWoMerge(dict):
def MERGE_DEFAULT(self):
return 1
def MERGE_WEIGHT(self, v, w):
return v
lhs = _MappingWoMerge({'a': 13, 'b': 12, 'c': 11})
lhs._mapping_type = _MappingWoMerge
rhs = self._makeMapping({'a': 1, 'b': 2, 'c': 3})
self.assertRaises(TypeError, self._callFUT, lhs.__class__, lhs, rhs)
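Taken together, these cases probe the weighted-merge protocol: a mapping taking part in weightedUnion needs MERGE (combine values present on both sides), MERGE_WEIGHT (weight a value present on one side only) and MERGE_DEFAULT (the value assumed for keys that only appear in a set); omit any one and the operation raises TypeError. A toy mapping supplying all three, modelled on the test doubles above rather than on the real BTrees types:

class _MappingWithAllHooks(dict):
    # Value assumed for keys contributed by a plain set.
    MERGE_DEFAULT = 1
    def MERGE(self, v1, w1, v2, w2):
        # Combine values found under the same key on both sides.
        return (v1 * w1) + (v2 * w2)
    def MERGE_WEIGHT(self, v, w):
        # Weight a value that appears on one side only.
        return v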
def test_lhs_set_wo_MERGE_DEFAULT_rhs_mapping(self):
lhs = self._makeSet('a', 'd')
lhs.MERGE = lambda v1, w1, v2, w2: (v1 * w1) + (v2 * w2)
......@@ -2725,6 +2747,17 @@ class Test_weightedIntersection(unittest.TestCase, _SetObBase):
rhs = {'b': 22, 'd': 14}
self.assertRaises(TypeError, self._callFUT, lhs.__class__, lhs, rhs)
def test_lhs_mapping_wo_MERGE_rhs_mapping(self):
class _MappingWoMerge(dict):
def MERGE_DEFAULT(self):
return 1
def MERGE_WEIGHT(self, v, w):
return v
lhs = _MappingWoMerge({'a': 13, 'b': 12, 'c': 11})
lhs._mapping_type = _MappingWoMerge
rhs = self._makeMapping({'a': 1, 'b': 2, 'c': 3})
self.assertRaises(TypeError, self._callFUT, lhs.__class__, lhs, rhs)
def test_lhs_set_wo_MERGE_DEFAULT_rhs_set(self):
lhs = self._makeSet('a', 'd')
lhs.MERGE = lambda v1, w1, v2, w2: (v1 * w1) + (v2 * w2)
......@@ -2824,7 +2857,10 @@ class Test_helpers(unittest.TestCase):
import sys
from BTrees._base import to_int
faux_self = object()
self.assertRaises(TypeError, to_int, faux_self, sys.maxint + 1)
try:
self.assertRaises(TypeError, to_int, faux_self, sys.maxint + 1)
except AttributeError: #pragma NO COVER Py3k
pass
def test_to_int_w_invalid(self):
from BTrees._base import to_int
......@@ -2863,25 +2899,28 @@ class Test_helpers(unittest.TestCase):
import sys
from BTrees._base import to_long
faux_self = object()
self.assertRaises(ValueError, to_long, faux_self, sys.maxint + 1)
try:
self.assertRaises(ValueError, to_long, faux_self, sys.maxint + 1)
except AttributeError: #pragma NO COVER Py3k
pass
def test_to_long_w_invalid(self):
from BTrees._base import to_long
faux_self = object()
self.assertRaises(TypeError, to_long, faux_self, ())
def test_to_str_w_ok(self):
from BTrees._base import to_str
def test_to_bytes_w_ok(self):
from BTrees._base import to_bytes
faux_self = object()
conv = to_str(3)
self.assertEqual(conv(faux_self, 'abc'), 'abc')
conv = to_bytes(3)
self.assertEqual(conv(faux_self, b'abc'), b'abc')
def test_to_str_w_invalid_length(self):
from BTrees._base import to_str
def test_to_bytes_w_invalid_length(self):
from BTrees._base import to_bytes
faux_self = object()
conv = to_str(3)
self.assertRaises(TypeError, conv, faux_self, 'ab')
self.assertRaises(TypeError, conv, faux_self, 'abcd')
conv = to_bytes(3)
self.assertRaises(TypeError, conv, faux_self, b'ab')
self.assertRaises(TypeError, conv, faux_self, b'abcd')
def test_MERGE(self):
from BTrees._base import MERGE
......
......@@ -38,7 +38,7 @@ class SubclassTest(unittest.TestCase):
t[i] = i
state = t.__getstate__()
self.assert_(state[0][0].__class__ is B)
self.assertTrue(state[0][0].__class__ is B)
def test_suite():
return unittest.makeSuite(SubclassTest)
......@@ -149,10 +149,10 @@ class Test_type_and_adr(unittest.TestCase):
def test_type_and_adr_w_oid(self):
from BTrees.utils import oid_repr
class WithOid(object):
_p_oid = 'DEADBEEF'
_p_oid = b'DEADBEEF'
t_and_a = self._callFUT(WithOid())
self.assertTrue(t_and_a.startswith('WithOid (0x'))
self.assertTrue(t_and_a.endswith('oid=%s)' % oid_repr('DEADBEEF')))
self.assertTrue(t_and_a.endswith('oid=%s)' % oid_repr(b'DEADBEEF')))
def test_type_and_adr_wo_oid(self):
class WithoutOid(object):
......
##############################################################################
#
# Copyright (c) 2002 Zope Foundation and Contributors.
# All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
##############################################################################
"""Test errors during comparison of BTree keys."""
import unittest
STR = "A string with hi-bit-set characters: \700\701"
UNICODE = u"A unicode string"
class CompareTest(unittest.TestCase):
def setUp(self):
# These defaults only make sense if the default encoding
# prevents STR from being promoted to Unicode.
self.assertRaises(UnicodeError, unicode, STR)
def _makeBucket(self):
from BTrees.OOBTree import OOBucket
return OOBucket()
def _makeSet(self):
from BTrees.OOBTree import OOSet
return OOSet()
def assertUE(self, callable, *args):
self.assertRaises(UnicodeError, callable, *args)
def testBucketGet(self):
import warnings
b = self._makeBucket()
with warnings.catch_warnings(True) as _warnlog:
b[STR] = 1
self.assertUE(b.get, UNICODE)
self.assertEqual(len(_warnlog), 1)
def testSetGet(self):
s = self._makeSet()
s.insert(STR)
self.assertUE(s.remove, UNICODE)
def testBucketSet(self):
b = self._makeBucket()
b[STR] = 1
self.assertUE(b.__setitem__, UNICODE, 1)
def testSetSet(self):
s = self._makeSet()
s.insert(STR)
self.assertUE(s.insert, UNICODE)
def testBucketMinKey(self):
b = self._makeBucket()
b[STR] = 1
self.assertUE(b.minKey, UNICODE)
def testSetMinKey(self):
s = self._makeSet()
s.insert(STR)
self.assertUE(s.minKey, UNICODE)
def test_suite():
return unittest.makeSuite(CompareTest)
......@@ -14,54 +14,51 @@
import unittest
class fsBucketTests(unittest.TestCase):
def _getTargetClass(self):
from BTrees.fsBTree import fsBucket
return fsBucket
class fsBucketBase(object):
def _makeOne(self, *args, **kw):
return self._getTargetClass()(*args, **kw)
def test_MERGE_WEIGHT(self):
bucket = self._makeOne()
self.assertEqual(bucket.MERGE_WEIGHT(42, 17), 42)
def _makeBytesItems(self):
from .._compat import _ascii
return[(_ascii(c*2), _ascii(c*6)) for c in 'abcdef']
def test_toString(self):
bucket = self._makeOne([(c*2, c*6) for c in 'abcdef'])
bucket = self._makeOne(self._makeBytesItems())
self.assertEqual(bucket.toString(),
'aabbccddeeffaaaaaabbbbbbccccccddddddeeeeeeffffff')
b'aabbccddeeffaaaaaabbbbbbccccccddddddeeeeeeffffff')
def test_fromString(self):
before = self._makeOne([(c*2, c*6) for c in 'abcdef'])
before = self._makeOne(self._makeBytesItems())
after = before.fromString(before.toString())
self.assertEqual(before.__getstate__(), after.__getstate__())
def test_fromString_empty(self):
before = self._makeOne([(c*2, c*6) for c in 'abcdef'])
after = before.fromString('')
before = self._makeOne(self._makeBytesItems())
after = before.fromString(b'')
self.assertEqual(after.__getstate__(), ((),))
def test_fromString_invalid(self):
bucket = self._makeOne([(c*2, c*6) for c in 'abcdef'])
self.assertRaises(ValueError, bucket.fromString, 'xxx')
def test_fromString_invalid_length(self):
bucket = self._makeOne(self._makeBytesItems())
self.assertRaises(ValueError, bucket.fromString, b'xxx')
class fsBTreeTests(unittest.TestCase):
class fsBucketTests(unittest.TestCase, fsBucketBase):
def _getTargetClass(self):
from BTrees.fsBTree import fsBTree
return fsBTree
from BTrees.fsBTree import fsBucket
return fsBucket
def _makeOne(self, *args, **kw):
return self._getTargetClass()(*args, **kw)
def test_MERGE_WEIGHT(self):
bucket = self._makeOne()
self.assertEqual(bucket.MERGE_WEIGHT(42, 17), 42)
class fsBucketPyTests(unittest.TestCase, fsBucketBase):
def _getTargetClass(self):
from BTrees.fsBTree import fsBucketPy
return fsBucketPy
def test_suite():
return unittest.TestSuite((
unittest.makeSuite(fsBucketTests),
unittest.makeSuite(fsBucketPyTests),
))
......@@ -22,11 +22,17 @@ class Test_non_negative(unittest.TestCase):
def test_w_big_negative(self):
import sys
self.assertEqual(self._callFUT(-sys.maxint), 1)
try:
self.assertEqual(self._callFUT(-sys.maxint), 1)
except AttributeError: #pragma NO COVER Py3k
pass
def test_w_negative(self):
import sys
self.assertEqual(self._callFUT(-1), sys.maxint)
try:
self.assertEqual(self._callFUT(-1), sys.maxint)
except AttributeError: #pragma NO COVER Py3k
pass
def test_w_zero(self):
self.assertEqual(self._callFUT(0), 0)
......@@ -36,7 +42,10 @@ class Test_non_negative(unittest.TestCase):
def test_w_big_positive(self):
import sys
self.assertEqual(self._callFUT(sys.maxint), sys.maxint)
try:
self.assertEqual(self._callFUT(sys.maxint), sys.maxint)
except AttributeError: #pragma NO COVER Py3k
pass
class Test_oid_repr(unittest.TestCase):
......@@ -63,16 +72,16 @@ class Test_oid_repr(unittest.TestCase):
self.assertEqual(self._callFUT(faux), repr(faux))
def test_w_zero(self):
self.assertEqual(self._callFUT('\0\0\0\0\0\0\0\0'), '0x00')
self.assertEqual(self._callFUT(b'\0\0\0\0\0\0\0\0'), b'0x00')
def test_w_one(self):
self.assertEqual(self._callFUT('\0\0\0\0\0\0\0\1'), '0x01')
self.assertEqual(self._callFUT(b'\0\0\0\0\0\0\0\1'), b'0x01')
def test_w_even_length(self):
self.assertEqual(self._callFUT('\0\0\0\0\0\0\xAB\xC4'), '0xabc4')
self.assertEqual(self._callFUT(b'\0\0\0\0\0\0\xAB\xC4'), b'0xabc4')
def test_w_odd_length(self):
self.assertEqual(self._callFUT('\0\0\0\0\0\0\x0D\xEF'), '0x0def')
self.assertEqual(self._callFUT(b'\0\0\0\0\0\0\x0D\xEF'), b'0x0def')
def test_suite():
......
......@@ -15,6 +15,8 @@
from binascii import hexlify
from ._compat import _bytes
def non_negative(int_val):
if int_val < 0:
# Coerce to non-negative.
......@@ -28,14 +30,16 @@ def positive_id(obj): #pragma NO COVER
def oid_repr(oid):
if isinstance(oid, str) and len(oid) == 8:
if isinstance(oid, _bytes) and len(oid) == 8:
# Convert to hex and strip leading zeroes.
as_hex = hexlify(oid).lstrip('0')
as_hex = hexlify(oid).lstrip(b'0')
# Ensure two characters per input byte.
chunks = [b'0x']
if len(as_hex) & 1:
as_hex = '0' + as_hex
elif as_hex == '':
as_hex = '00'
return '0x' + as_hex
chunks.append(b'0')
elif as_hex == b'':
as_hex = b'00'
chunks.append(as_hex)
return b''.join(chunks)
else:
return repr(oid)
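For a well-formed 8-byte oid the rewritten helper now returns a bytes literal; anything else falls through to repr(). A few worked cases, matching the updated tests above:

assert oid_repr(b'\x00' * 8) == b'0x00'
assert oid_repr(b'\0\0\0\0\0\0\xAB\xC4') == b'0xabc4'
assert oid_repr(b'\0\0\0\0\0\0\x0D\xEF') == b'0x0def'
assert oid_repr('not an oid') == repr('not an oid')   # non-oid input falls back to repr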
......@@ -5,6 +5,9 @@
4.0.2 (unreleased)
------------------
- Added explicit support for Python 3.2, Python 3.3, and PyPy.
Note that the C extensions are not (yet) available on PyPy.
- Python reference implementations now tested separately from the C
versions on all platforms.
......
......@@ -100,16 +100,22 @@ is_jython = 'java' in sys.platform
# Jython cannot build the C optimizations, while on PyPy they are
# anti-optimizations (the C extension compatibility layer is known-slow,
# and defeats JIT opportunities).
if pure_python or is_pypy or is_jython or sys.version_info[0] > 2:
if pure_python or is_pypy or is_jython:
ext_modules = []
else:
ext_modules = [BTreeExtension(family) for family in FAMILIES]
REQUIRES = [
'persistent',
'zope.interface',
]
if sys.version_info[0] > 3:
REQUIRES = [
'persistent>=4.0.4',
'zope.interface',
]
else:
REQUIRES = [
'persistent',
'zope.interface',
]
TESTS_REQUIRE = REQUIRES + ['transaction']
setup(name='BTrees',
......@@ -123,8 +129,9 @@ setup(name='BTrees',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
#'Programming Language :: Python :: 3',
#'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
"Programming Language :: Python :: Implementation :: CPython",
"Programming Language :: Python :: Implementation :: PyPy",
"Topic :: Database",
......
......@@ -3,7 +3,7 @@ envlist =
# Jython support pending 2.7 support, due 2012-07-15 or so. See:
# http://fwierzbicki.blogspot.com/2012/03/adconion-to-fund-jython-27.html
# py26,py27,py32,jython,pypy,coverage,docs
py26,py27,pypy,w_zodb,coverage,docs
py26,py27,pypy,py32,py33,w_zodb,coverage,docs
[testenv]
deps =
......