Commit 5364296a authored by Tres Seaver

Newer tip.

parent 1d904037
===================
Persistence support
===================
(This document is under construction. More basic documentation will eventually
appear here.)
Overriding `__getattr__`, `__getattribute__`, `__setattr__`, and `__delattr__`
------------------------------------------------------------------------------
Subclasses can override the attribute-management methods. For the
`__getattr__` method, the behavior is like that for regular Python
classes and for earlier versions of ZODB 3.
For `__getattribute__`, `__setattr__`, and `__delattr__`, it is necessary
to call certain methods defined by `persistent.Persistent`. Detailed
examples and documentation are provided in the test module,
`persistent.tests.test_overriding_attrs`.
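A minimal sketch of the expected pattern (illustrative only; the class name
`_Sample` is hypothetical, and the authoritative examples are the ones in
the test module)::

    from persistent import Persistent

    class _Sample(Persistent):

        def __getattribute__(self, name):
            # _p_getattr() unghostifies the object and records the access,
            # if necessary; a true result means the base class must handle
            # the name (for example, the _p_ attributes).
            if Persistent._p_getattr(self, name):
                return Persistent.__getattribute__(self, name)
            # ... custom handling of ordinary attributes goes here ...
            return Persistent.__getattribute__(self, name)

        def __setattr__(self, name, value):
            # A true result means _p_setattr() already handled the
            # persistence meta data itself.
            if not Persistent._p_setattr(self, name, value):
                Persistent.__setattr__(self, name, value)

        def __delattr__(self, name):
            if not Persistent._p_delattr(self, name):
                Persistent.__delattr__(self, name)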
/*****************************************************************************
Copyright (c) 2001, 2004 Zope Foundation and Contributors.
All Rights Reserved.
This software is subject to the provisions of the Zope Public License,
Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
FOR A PARTICULAR PURPOSE
****************************************************************************/
#include "Python.h"
#include <time.h>
PyObject *TimeStamp_FromDate(int, int, int, int, int, double);
PyObject *TimeStamp_FromString(const char *);
static char TimeStampModule_doc[] =
"A 64-bit TimeStamp used as a ZODB serial number.\n"
"\n"
"$Id$\n";
typedef struct {
PyObject_HEAD
unsigned char data[8];
} TimeStamp;
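/* Layout of the 8 bytes (see TimeStamp_FromDate() and TimeStamp_unpack()):
   bytes 0-3 hold ((year - 1900) * 12 + month - 1) * 31 * 1440
   + (day - 1) * 1440 + hour * 60 + minute, i.e. a minute counter in which
   every month is padded to 31 days (44640 = 31 * 1440 and
   535680 = 12 * 44640 minutes); bytes 4-7 hold the seconds within the
   minute as a 32-bit fraction of a minute, scaled by SCONV = 60 / 2^32. */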
/* The first dimension of the arrays below is non-leapyear / leapyear */
static char month_len[2][12]={
{31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31},
{31, 29, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31}
};
static short joff[2][12] = {
{0, 31, 59, 90, 120, 151, 181, 212, 243, 273, 304, 334},
{0, 31, 60, 91, 121, 152, 182, 213, 244, 274, 305, 335}
};
static double gmoff=0;
/* TODO: May be better (faster) to store in a file static. */
#define SCONV ((double)60) / ((double)(1<<16)) / ((double)(1<<16))
static int
leap(int year)
{
return year % 4 == 0 && (year % 100 != 0 || year % 400 == 0);
}
static int
days_in_month(int year, int month)
{
return month_len[leap(year)][month];
}
static double
TimeStamp_yad(int y)
{
double d, s;
y -= 1900;
d = (y - 1) * 365;
if (y > 0) {
s = 1.0;
y -= 1;
} else {
s = -1.0;
y = -y;
}
return d + s * (y / 4 - y / 100 + (y + 300) / 400);
}
static double
TimeStamp_abst(int y, int mo, int d, int m, int s)
{
return (TimeStamp_yad(y) + joff[leap(y)][mo] + d) * 86400 + m * 60 + s;
}
static int
TimeStamp_init_gmoff(void)
{
struct tm *t;
time_t z=0;
t = gmtime(&z);
if (t == NULL) {
PyErr_SetString(PyExc_SystemError, "gmtime failed");
return -1;
}
gmoff = TimeStamp_abst(t->tm_year+1900, t->tm_mon, t->tm_mday - 1,
t->tm_hour * 60 + t->tm_min, t->tm_sec);
return 0;
}
static void
TimeStamp_dealloc(TimeStamp *ts)
{
PyObject_Del(ts);
}
static int
TimeStamp_compare(TimeStamp *v, TimeStamp *w)
{
int cmp = memcmp(v->data, w->data, 8);
if (cmp < 0) return -1;
if (cmp > 0) return 1;
return 0;
}
static long
TimeStamp_hash(TimeStamp *self)
{
register unsigned char *p = (unsigned char *)self->data;
register int len = 8;
register long x = *p << 7;
while (--len >= 0)
x = (1000003*x) ^ *p++;
x ^= 8;
if (x == -1)
x = -2;
return x;
}
typedef struct {
/* TODO: reverse-engineer what's in these things and comment them */
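/* As decoded by TimeStamp_unpack(): y is the year, m the month (1-12),
   d the day of the month (1-31), and mi the minutes since midnight (0-1439). */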
int y;
int m;
int d;
int mi;
} TimeStampParts;
static void
TimeStamp_unpack(TimeStamp *self, TimeStampParts *p)
{
unsigned long v;
v = (self->data[0] * 16777216 + self->data[1] * 65536
+ self->data[2] * 256 + self->data[3]);
p->y = v / 535680 + 1900;
p->m = (v % 535680) / 44640 + 1;
p->d = (v % 44640) / 1440 + 1;
p->mi = v % 1440;
}
static double
TimeStamp_sec(TimeStamp *self)
{
unsigned int v;
v = (self->data[4] * 16777216 + self->data[5] * 65536
+ self->data[6] * 256 + self->data[7]);
return SCONV * v;
}
static PyObject *
TimeStamp_year(TimeStamp *self)
{
TimeStampParts p;
TimeStamp_unpack(self, &p);
return PyInt_FromLong(p.y);
}
static PyObject *
TimeStamp_month(TimeStamp *self)
{
TimeStampParts p;
TimeStamp_unpack(self, &p);
return PyInt_FromLong(p.m);
}
static PyObject *
TimeStamp_day(TimeStamp *self)
{
TimeStampParts p;
TimeStamp_unpack(self, &p);
return PyInt_FromLong(p.d);
}
static PyObject *
TimeStamp_hour(TimeStamp *self)
{
TimeStampParts p;
TimeStamp_unpack(self, &p);
return PyInt_FromLong(p.mi / 60);
}
static PyObject *
TimeStamp_minute(TimeStamp *self)
{
TimeStampParts p;
TimeStamp_unpack(self, &p);
return PyInt_FromLong(p.mi % 60);
}
static PyObject *
TimeStamp_second(TimeStamp *self)
{
return PyFloat_FromDouble(TimeStamp_sec(self));
}
static PyObject *
TimeStamp_timeTime(TimeStamp *self)
{
TimeStampParts p;
TimeStamp_unpack(self, &p);
return PyFloat_FromDouble(TimeStamp_abst(p.y, p.m - 1, p.d - 1, p.mi, 0)
+ TimeStamp_sec(self) - gmoff);
}
static PyObject *
TimeStamp_raw(TimeStamp *self)
{
return PyString_FromStringAndSize((const char*)self->data, 8);
}
static PyObject *
TimeStamp_str(TimeStamp *self)
{
char buf[128];
TimeStampParts p;
int len;
TimeStamp_unpack(self, &p);
len = sprintf(buf, "%4.4d-%2.2d-%2.2d %2.2d:%2.2d:%09.6f",
p.y, p.m, p.d, p.mi / 60, p.mi % 60,
TimeStamp_sec(self));
return PyString_FromStringAndSize(buf, len);
}
static PyObject *
TimeStamp_laterThan(TimeStamp *self, PyObject *obj)
{
TimeStamp *o = NULL;
TimeStampParts p;
unsigned char new[8];
int i;
if (obj->ob_type != self->ob_type) {
PyErr_SetString(PyExc_TypeError, "expected TimeStamp object");
return NULL;
}
o = (TimeStamp *)obj;
if (memcmp(self->data, o->data, 8) > 0) {
Py_INCREF(self);
return (PyObject *)self;
}
memcpy(new, o->data, 8);
for (i = 7; i > 3; i--) {
if (new[i] == 255)
new[i] = 0;
else {
new[i]++;
return TimeStamp_FromString((const char*)new);
}
}
/* The four low-order (sub-minute) bytes were all 0xFF and wrapped to
zero. Need to increment the minute, and possibly the day, month, and
year, explicitly. */
TimeStamp_unpack(o, &p);
if (p.mi >= 1439) {
p.mi = 0;
if (p.d == month_len[leap(p.y)][p.m - 1]) {
p.d = 1;
if (p.m == 12) {
p.m = 1;
p.y++;
} else
p.m++;
} else
p.d++;
} else
p.mi++;
return TimeStamp_FromDate(p.y, p.m, p.d, p.mi / 60, p.mi % 60, 0);
}
static struct PyMethodDef TimeStamp_methods[] = {
{"year", (PyCFunction)TimeStamp_year, METH_NOARGS},
{"minute", (PyCFunction)TimeStamp_minute, METH_NOARGS},
{"month", (PyCFunction)TimeStamp_month, METH_NOARGS},
{"day", (PyCFunction)TimeStamp_day, METH_NOARGS},
{"hour", (PyCFunction)TimeStamp_hour, METH_NOARGS},
{"second", (PyCFunction)TimeStamp_second, METH_NOARGS},
{"timeTime",(PyCFunction)TimeStamp_timeTime, METH_NOARGS},
{"laterThan", (PyCFunction)TimeStamp_laterThan, METH_O},
{"raw", (PyCFunction)TimeStamp_raw, METH_NOARGS},
{NULL, NULL},
};
static PyTypeObject TimeStamp_type = {
PyObject_HEAD_INIT(NULL)
0,
"persistent.TimeStamp",
sizeof(TimeStamp),
0,
(destructor)TimeStamp_dealloc, /* tp_dealloc */
0, /* tp_print */
0, /* tp_getattr */
0, /* tp_setattr */
(cmpfunc)TimeStamp_compare, /* tp_compare */
(reprfunc)TimeStamp_raw, /* tp_repr */
0, /* tp_as_number */
0, /* tp_as_sequence */
0, /* tp_as_mapping */
(hashfunc)TimeStamp_hash, /* tp_hash */
0, /* tp_call */
(reprfunc)TimeStamp_str, /* tp_str */
0, /* tp_getattro */
0, /* tp_setattro */
0, /* tp_as_buffer */
Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE, /* tp_flags */
0, /* tp_doc */
0, /* tp_traverse */
0, /* tp_clear */
0, /* tp_richcompare */
0, /* tp_weaklistoffset */
0, /* tp_iter */
0, /* tp_iternext */
TimeStamp_methods, /* tp_methods */
0, /* tp_members */
0, /* tp_getset */
0, /* tp_base */
0, /* tp_dict */
0, /* tp_descr_get */
0, /* tp_descr_set */
};
PyObject *
TimeStamp_FromString(const char *buf)
{
/* buf must be exactly 8 characters */
TimeStamp *ts = (TimeStamp *)PyObject_New(TimeStamp, &TimeStamp_type);
memcpy(ts->data, buf, 8);
return (PyObject *)ts;
}
#define CHECK_RANGE(VAR, LO, HI) if ((VAR) < (LO) || (VAR) > (HI)) { \
return PyErr_Format(PyExc_ValueError, \
# VAR " must be between %d and %d: %d", \
(LO), (HI), (VAR)); \
}
PyObject *
TimeStamp_FromDate(int year, int month, int day, int hour, int min,
double sec)
{
TimeStamp *ts = NULL;
int d;
unsigned int v;
if (year < 1900)
return PyErr_Format(PyExc_ValueError,
"year must be greater than 1900: %d", year);
CHECK_RANGE(month, 1, 12);
d = days_in_month(year, month - 1);
if (day < 1 || day > d)
return PyErr_Format(PyExc_ValueError,
"day must be between 1 and %d: %d", d, day);
CHECK_RANGE(hour, 0, 23);
CHECK_RANGE(min, 0, 59);
/* Seconds are allowed to be anything, so chill.
If we did want to be picky, 60 would be a better choice.
if (sec < 0 || sec > 59)
return PyErr_Format(PyExc_ValueError,
"second must be between 0 and 59: %f", sec);
*/
ts = (TimeStamp *)PyObject_New(TimeStamp, &TimeStamp_type);
v = (((year - 1900) * 12 + month - 1) * 31 + day - 1);
v = (v * 24 + hour) * 60 + min;
ts->data[0] = v / 16777216;
ts->data[1] = (v % 16777216) / 65536;
ts->data[2] = (v % 65536) / 256;
ts->data[3] = v % 256;
sec /= SCONV;
v = (unsigned int)sec;
ts->data[4] = v / 16777216;
ts->data[5] = (v % 16777216) / 65536;
ts->data[6] = (v % 65536) / 256;
ts->data[7] = v % 256;
return (PyObject *)ts;
}
PyObject *
TimeStamp_TimeStamp(PyObject *obj, PyObject *args)
{
char *buf = NULL;
int len = 0, y, mo, d, h = 0, m = 0;
double sec = 0;
if (PyArg_ParseTuple(args, "s#:TimeStamp", &buf, &len)) {
if (len != 8) {
PyErr_SetString(PyExc_ValueError, "8-character string expected");
return NULL;
}
return TimeStamp_FromString(buf);
}
PyErr_Clear();
if (!PyArg_ParseTuple(args, "iii|iid", &y, &mo, &d, &h, &m, &sec))
return NULL;
return TimeStamp_FromDate(y, mo, d, h, m, sec);
}
static PyMethodDef TimeStampModule_functions[] = {
{"TimeStamp", TimeStamp_TimeStamp, METH_VARARGS},
{NULL, NULL},
};
void
initTimeStamp(void)
{
PyObject *m;
if (TimeStamp_init_gmoff() < 0)
return;
m = Py_InitModule4("TimeStamp", TimeStampModule_functions,
TimeStampModule_doc, NULL, PYTHON_API_VERSION);
if (m == NULL)
return;
TimeStamp_type.ob_type = &PyType_Type;
TimeStamp_type.tp_getattro = PyObject_GenericGetAttr;
}
##############################################################################
#
# Copyright (c) 2001, 2002 Zope Foundation and Contributors.
# All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE
#
##############################################################################
"""Provide access to Persistent and PersistentMapping.
$Id$
"""
try:
    from cPersistence import Persistent
    from cPersistence import GHOST
    from cPersistence import UPTODATE
    from cPersistence import CHANGED
    from cPersistence import STICKY
    from cPersistence import simple_new
except ImportError: # XXX need pure-Python fallback
    _HAVE_CPERSISTENCE = False
    from pyPersistence import Persistent
    from pyPersistence import GHOST
    from pyPersistence import UPTODATE
    from pyPersistence import CHANGED
    from pyPersistence import STICKY
else:
    _HAVE_CPERSISTENCE = True
    import copy_reg
    copy_reg.constructor(simple_new)

try:
    from cPickleCache import PickleCache
except ImportError:
    from picklecache import PickleCache

if _HAVE_CPERSISTENCE:
    # Make an interface declaration for Persistent, if zope.interface
    # is available. XXX does the pyPersistence version already do this?
    try:
        from zope.interface import classImplements
    except ImportError:
        pass
    else:
        from persistent.interfaces import IPersistent
        classImplements(Persistent, IPersistent)
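# A minimal usage sketch (illustrative only; "Note" is a hypothetical
# application class, not part of this package):
#
#     from persistent import Persistent
#
#     class Note(Persistent):
#         def __init__(self, text):
#             self.text = text
#
#     note = Note("hello")
#     note.text = "updated"  # ordinary attribute writes mark the object as
#                            # changed once it has a data manager (_p_jar)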
/*****************************************************************************
Copyright (c) 2001, 2002 Zope Foundation and Contributors.
All Rights Reserved.
This software is subject to the provisions of the Zope Public License,
Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
FOR A PARTICULAR PURPOSE
****************************************************************************/
static char cPersistence_doc_string[] =
"Defines Persistent mixin class for persistent objects.\n"
"\n"
"$Id$\n";
#include "cPersistence.h"
#include "structmember.h"
struct ccobject_head_struct {
CACHE_HEAD
};
/* These two objects are initialized when the module is loaded */
static PyObject *TimeStamp, *py_simple_new;
/* Strings initialized by init_strings() below. */
static PyObject *py_keys, *py_setstate, *py___dict__, *py_timeTime;
static PyObject *py__p_changed, *py__p_deactivate;
static PyObject *py___getattr__, *py___setattr__, *py___delattr__;
static PyObject *py___slotnames__, *copy_reg_slotnames, *__newobj__;
static PyObject *py___getnewargs__, *py___getstate__;
static int
init_strings(void)
{
#define INIT_STRING(S) \
if (!(py_ ## S = PyString_InternFromString(#S))) \
return -1;
INIT_STRING(keys);
INIT_STRING(setstate);
INIT_STRING(timeTime);
INIT_STRING(__dict__);
INIT_STRING(_p_changed);
INIT_STRING(_p_deactivate);
INIT_STRING(__getattr__);
INIT_STRING(__setattr__);
INIT_STRING(__delattr__);
INIT_STRING(__slotnames__);
INIT_STRING(__getnewargs__);
INIT_STRING(__getstate__);
#undef INIT_STRING
return 0;
}
#ifdef Py_DEBUG
static void
fatal_1350(cPersistentObject *self, const char *caller, const char *detail)
{
char buf[1000];
PyOS_snprintf(buf, sizeof(buf),
"cPersistence.c %s(): object at %p with type %.200s\n"
"%s.\n"
"The only known cause is multiple threads trying to ghost and\n"
"unghost the object simultaneously.\n"
"That's not legal, but ZODB can't stop it.\n"
"See Collector #1350.\n",
caller, self, self->ob_type->tp_name, detail);
Py_FatalError(buf);
}
#endif
static void ghostify(cPersistentObject*);
/* Load the state of the object, unghostifying it. Upon success, return 1.
* If an error occurred, re-ghostify the object and return -1.
*/
static int
unghostify(cPersistentObject *self)
{
if (self->state < 0 && self->jar)
{
PyObject *r;
/* Is it ever possible to not have a cache? */
if (self->cache)
{
/* Create a node in the ring for this unghostified object. */
self->cache->non_ghost_count++;
self->cache->total_estimated_size +=
_estimated_size_in_bytes(self->estimated_size);
ring_add(&self->cache->ring_home, &self->ring);
Py_INCREF(self);
}
/* set state to CHANGED while setstate() call is in progress
to prevent a recursive call to _PyPersist_Load().
*/
self->state = cPersistent_CHANGED_STATE;
/* Call the object's __setstate__() */
r = PyObject_CallMethod(self->jar, "setstate", "O", (PyObject *)self);
if (r == NULL)
{
ghostify(self);
return -1;
}
self->state = cPersistent_UPTODATE_STATE;
Py_DECREF(r);
if (self->cache && self->ring.r_next == NULL)
{
#ifdef Py_DEBUG
fatal_1350(self, "unghostify",
"is not in the cache despite that we just "
"unghostified it");
#else
PyErr_Format(PyExc_SystemError, "object at %p with type "
"%.200s not in the cache despite that we just "
"unghostified it", self, self->ob_type->tp_name);
return -1;
#endif
}
}
return 1;
}
/****************************************************************************/
static PyTypeObject Pertype;
static void
accessed(cPersistentObject *self)
{
/* Do nothing unless the object is in a cache and not a ghost. */
if (self->cache && self->state >= 0 && self->ring.r_next)
ring_move_to_head(&self->cache->ring_home, &self->ring);
}
static void
ghostify(cPersistentObject *self)
{
PyObject **dictptr;
/* are we already a ghost? */
if (self->state == cPersistent_GHOST_STATE)
return;
/* Is it ever possible to not have a cache? */
if (self->cache == NULL)
{
self->state = cPersistent_GHOST_STATE;
return;
}
if (self->ring.r_next == NULL)
{
/* There's no way to raise an error in this routine. */
#ifdef Py_DEBUG
fatal_1350(self, "ghostify", "claims to be in a cache but isn't");
#else
return;
#endif
}
/* If we're ghostifying an object, we better have some non-ghosts. */
assert(self->cache->non_ghost_count > 0);
self->cache->non_ghost_count--;
self->cache->total_estimated_size -=
_estimated_size_in_bytes(self->estimated_size);
ring_del(&self->ring);
self->state = cPersistent_GHOST_STATE;
dictptr = _PyObject_GetDictPtr((PyObject *)self);
if (dictptr && *dictptr)
{
Py_DECREF(*dictptr);
*dictptr = NULL;
}
/* We remove the reference to the just ghosted object that the ring
* holds. Note that the dictionary of oids->objects has an uncounted
* reference, so if the ring's reference was the only one, this frees
* the ghost object. Note further that the object's dealloc knows to
* inform the dictionary that it is going away.
*/
Py_DECREF(self);
}
static int
changed(cPersistentObject *self)
{
if ((self->state == cPersistent_UPTODATE_STATE ||
self->state == cPersistent_STICKY_STATE)
&& self->jar)
{
PyObject *meth, *arg, *result;
static PyObject *s_register;
if (s_register == NULL)
s_register = PyString_InternFromString("register");
meth = PyObject_GetAttr((PyObject *)self->jar, s_register);
if (meth == NULL)
return -1;
arg = PyTuple_New(1);
if (arg == NULL)
{
Py_DECREF(meth);
return -1;
}
Py_INCREF(self);
PyTuple_SET_ITEM(arg, 0, (PyObject *)self);
result = PyEval_CallObject(meth, arg);
Py_DECREF(arg);
Py_DECREF(meth);
if (result == NULL)
return -1;
Py_DECREF(result);
self->state = cPersistent_CHANGED_STATE;
}
return 0;
}
static int
readCurrent(cPersistentObject *self)
{
if ((self->state == cPersistent_UPTODATE_STATE ||
self->state == cPersistent_STICKY_STATE)
&& self->jar && self->oid)
{
static PyObject *s_readCurrent=NULL;
PyObject *r;
if (s_readCurrent == NULL)
s_readCurrent = PyString_InternFromString("readCurrent");
r = PyObject_CallMethodObjArgs(self->jar, s_readCurrent, self, NULL);
if (r == NULL)
return -1;
Py_DECREF(r);
}
return 0;
}
static PyObject *
Per__p_deactivate(cPersistentObject *self)
{
if (self->state == cPersistent_UPTODATE_STATE && self->jar)
{
PyObject **dictptr = _PyObject_GetDictPtr((PyObject *)self);
if (dictptr && *dictptr)
{
Py_DECREF(*dictptr);
*dictptr = NULL;
}
/* Note that we need to set to ghost state unless we are
called directly. Methods that override this need to
do the same! */
ghostify(self);
}
Py_INCREF(Py_None);
return Py_None;
}
static PyObject *
Per__p_activate(cPersistentObject *self)
{
if (unghostify(self) < 0)
return NULL;
Py_INCREF(Py_None);
return Py_None;
}
static int Per_set_changed(cPersistentObject *self, PyObject *v);
static PyObject *
Per__p_invalidate(cPersistentObject *self)
{
signed char old_state = self->state;
if (old_state != cPersistent_GHOST_STATE)
{
if (Per_set_changed(self, NULL) < 0)
return NULL;
ghostify(self);
}
Py_INCREF(Py_None);
return Py_None;
}
static PyObject *
pickle_slotnames(PyTypeObject *cls)
{
PyObject *slotnames;
slotnames = PyDict_GetItem(cls->tp_dict, py___slotnames__);
if (slotnames)
{
int n = PyObject_Not(slotnames);
if (n < 0)
return NULL;
if (n)
slotnames = Py_None;
Py_INCREF(slotnames);
return slotnames;
}
slotnames = PyObject_CallFunctionObjArgs(copy_reg_slotnames,
(PyObject*)cls, NULL);
if (slotnames && !(slotnames == Py_None || PyList_Check(slotnames)))
{
PyErr_SetString(PyExc_TypeError,
"copy_reg._slotnames didn't return a list or None");
Py_DECREF(slotnames);
return NULL;
}
return slotnames;
}
static PyObject *
pickle_copy_dict(PyObject *state)
{
PyObject *copy, *key, *value;
char *ckey;
Py_ssize_t pos = 0;
copy = PyDict_New();
if (!copy)
return NULL;
if (!state)
return copy;
while (PyDict_Next(state, &pos, &key, &value))
{
if (key && PyString_Check(key))
{
ckey = PyString_AS_STRING(key);
if (*ckey == '_' &&
(ckey[1] == 'v' || ckey[1] == 'p') &&
ckey[2] == '_')
/* skip volatile and persistent */
continue;
}
if (PyObject_SetItem(copy, key, value) < 0)
goto err;
}
return copy;
err:
Py_DECREF(copy);
return NULL;
}
static char pickle___getstate__doc[] =
"Get the object serialization state\n"
"\n"
"If the object has no assigned slots and has no instance dictionary, then \n"
"None is returned.\n"
"\n"
"If the object has no assigned slots and has an instance dictionary, then \n"
"the a copy of the instance dictionary is returned. The copy has any items \n"
"with names starting with '_v_' or '_p_' ommitted.\n"
"\n"
"If the object has assigned slots, then a two-element tuple is returned. \n"
"The first element is either None or a copy of the instance dictionary, \n"
"as described above. The second element is a dictionary with items \n"
"for each of the assigned slots.\n"
;
static PyObject *
pickle___getstate__(PyObject *self)
{
PyObject *slotnames=NULL, *slots=NULL, *state=NULL;
PyObject **dictp;
int n=0;
slotnames = pickle_slotnames(self->ob_type);
if (!slotnames)
return NULL;
dictp = _PyObject_GetDictPtr(self);
if (dictp)
state = pickle_copy_dict(*dictp);
else
{
state = Py_None;
Py_INCREF(state);
}
if (slotnames != Py_None)
{
int i;
slots = PyDict_New();
if (!slots)
goto end;
for (i = 0; i < PyList_GET_SIZE(slotnames); i++)
{
PyObject *name, *value;
char *cname;
name = PyList_GET_ITEM(slotnames, i);
if (PyString_Check(name))
{
cname = PyString_AS_STRING(name);
if (*cname == '_' &&
(cname[1] == 'v' || cname[1] == 'p') &&
cname[2] == '_')
/* skip volatile and persistent */
continue;
}
/* Unclear: Will this go through our getattr hook? */
value = PyObject_GetAttr(self, name);
if (value == NULL)
PyErr_Clear();
else
{
int err = PyDict_SetItem(slots, name, value);
Py_DECREF(value);
if (err < 0)
goto end;
n++;
}
}
}
if (n)
state = Py_BuildValue("(NO)", state, slots);
end:
Py_XDECREF(slotnames);
Py_XDECREF(slots);
return state;
}
static int
pickle_setattrs_from_dict(PyObject *self, PyObject *dict)
{
PyObject *key, *value;
Py_ssize_t pos = 0;
if (!PyDict_Check(dict))
{
PyErr_SetString(PyExc_TypeError, "Expected dictionary");
return -1;
}
while (PyDict_Next(dict, &pos, &key, &value))
{
if (PyObject_SetAttr(self, key, value) < 0)
return -1;
}
return 0;
}
static char pickle___setstate__doc[] =
"Set the object serialization state\n\n"
"The state should be in one of 3 forms:\n\n"
"- None\n\n"
" Ignored\n\n"
"- A dictionary\n\n"
" In this case, the object's instance dictionary will be cleared and \n"
" updated with the new state.\n\n"
"- A two-tuple with a string as the first element. \n\n"
" In this case, the method named by the string in the first element will\n"
" be called with the second element.\n\n"
" This form supports migration of data formats.\n\n"
"- A two-tuple with None or a Dictionary as the first element and\n"
" with a dictionary as the second element.\n\n"
" If the first element is not None, then the object's instance dictionary \n"
" will be cleared and updated with the value.\n\n"
" The items in the second element will be assigned as attributes.\n"
;
static PyObject *
pickle___setstate__(PyObject *self, PyObject *state)
{
PyObject *slots=NULL;
if (PyTuple_Check(state))
{
if (!PyArg_ParseTuple(state, "OO:__setstate__", &state, &slots))
return NULL;
}
if (state != Py_None)
{
PyObject **dict;
dict = _PyObject_GetDictPtr(self);
if (!dict)
{
PyErr_SetString(PyExc_TypeError,
"this object has no instance dictionary");
return NULL;
}
if (!*dict)
{
*dict = PyDict_New();
if (!*dict)
return NULL;
}
PyDict_Clear(*dict);
if (PyDict_Update(*dict, state) < 0)
return NULL;
}
if (slots && pickle_setattrs_from_dict(self, slots) < 0)
return NULL;
Py_INCREF(Py_None);
return Py_None;
}
static char pickle___reduce__doc[] =
"Reduce an object to contituent parts for serialization\n"
;
static PyObject *
pickle___reduce__(PyObject *self)
{
PyObject *args=NULL, *bargs=NULL, *state=NULL, *getnewargs=NULL;
int l, i;
getnewargs = PyObject_GetAttr(self, py___getnewargs__);
if (getnewargs)
{
bargs = PyObject_CallFunctionObjArgs(getnewargs, NULL);
Py_DECREF(getnewargs);
if (!bargs)
return NULL;
l = PyTuple_Size(bargs);
if (l < 0)
goto end;
}
else
{
PyErr_Clear();
l = 0;
}
args = PyTuple_New(l+1);
if (args == NULL)
goto end;
Py_INCREF(self->ob_type);
PyTuple_SET_ITEM(args, 0, (PyObject*)(self->ob_type));
for (i = 0; i < l; i++)
{
Py_INCREF(PyTuple_GET_ITEM(bargs, i));
PyTuple_SET_ITEM(args, i+1, PyTuple_GET_ITEM(bargs, i));
}
state = PyObject_CallMethodObjArgs(self, py___getstate__, NULL);
if (!state)
goto end;
state = Py_BuildValue("(OON)", __newobj__, args, state);
end:
Py_XDECREF(bargs);
Py_XDECREF(args);
return state;
}
/* Return the object's state, a dict or None.
If the object has no dict, its state is None.
Otherwise, return a dict containing all the attributes that
don't start with "_v_".
The caller should not modify this dict, as it may be a reference to
the object's __dict__.
*/
static PyObject *
Per__getstate__(cPersistentObject *self)
{
/* TODO: Should it be an error to call __getstate__() on a ghost? */
if (unghostify(self) < 0)
return NULL;
/* TODO: should we increment stickiness? Tim doesn't understand that
question. */
return pickle___getstate__((PyObject*)self);
}
/* The Persistent base type provides a traverse function, but not a
clear function. An instance of a Persistent subclass will have
its dict cleared through subtype_clear().
There is always a cycle between a persistent object and its cache.
When the cycle becomes unreachable, the clear function for the
cache will break the cycle. Thus, the persistent object need not
have a clear function. It would be complex to write a clear function
for the objects, if we needed one, because of the reference count
tricks done by the cache.
*/
static void
Per_dealloc(cPersistentObject *self)
{
if (self->state >= 0)
{
/* If the cache has been cleared, then a non-ghost object
isn't in the ring any longer.
*/
if (self->ring.r_next != NULL)
{
/* if we're ghostifying an object, we better have some non-ghosts */
assert(self->cache->non_ghost_count > 0);
self->cache->non_ghost_count--;
self->cache->total_estimated_size -=
_estimated_size_in_bytes(self->estimated_size);
ring_del(&self->ring);
}
}
if (self->cache)
cPersistenceCAPI->percachedel(self->cache, self->oid);
Py_XDECREF(self->cache);
Py_XDECREF(self->jar);
Py_XDECREF(self->oid);
self->ob_type->tp_free(self);
}
static int
Per_traverse(cPersistentObject *self, visitproc visit, void *arg)
{
int err;
#define VISIT(SLOT) \
if (SLOT) { \
err = visit((PyObject *)(SLOT), arg); \
if (err) \
return err; \
}
VISIT(self->jar);
VISIT(self->oid);
VISIT(self->cache);
#undef VISIT
return 0;
}
/* convert_name() returns a new reference to a string name
or sets an exception and returns NULL.
*/
static PyObject *
convert_name(PyObject *name)
{
#ifdef Py_USING_UNICODE
/* The Unicode to string conversion is done here because the
existing tp_setattro slots expect a string object as name
and we wouldn't want to break those. */
if (PyUnicode_Check(name))
{
name = PyUnicode_AsEncodedString(name, NULL, NULL);
}
else
#endif
if (!PyString_Check(name))
{
PyErr_SetString(PyExc_TypeError, "attribute name must be a string");
return NULL;
}
else
Py_INCREF(name);
return name;
}
/* Returns true if the object requires unghostification.
There are several special attributes that we allow access to without
requiring that the object be unghostified:
__class__
__del__
__dict__
__of__
__setstate__
*/
static int
unghost_getattr(const char *s)
{
if (*s++ != '_')
return 1;
if (*s == 'p')
{
s++;
if (*s == '_')
return 0; /* _p_ */
else
return 1;
}
else if (*s == '_')
{
s++;
switch (*s)
{
case 'c':
return strcmp(s, "class__");
case 'd':
s++;
if (!strcmp(s, "el__"))
return 0; /* __del__ */
if (!strcmp(s, "ict__"))
return 0; /* __dict__ */
return 1;
case 'o':
return strcmp(s, "of__");
case 's':
return strcmp(s, "setstate__");
default:
return 1;
}
}
return 1;
}
static PyObject*
Per_getattro(cPersistentObject *self, PyObject *name)
{
PyObject *result = NULL; /* guilty until proved innocent */
char *s;
name = convert_name(name);
if (!name)
goto Done;
s = PyString_AS_STRING(name);
if (unghost_getattr(s))
{
if (unghostify(self) < 0)
goto Done;
accessed(self);
}
result = PyObject_GenericGetAttr((PyObject *)self, name);
Done:
Py_XDECREF(name);
return result;
}
/* Exposed as _p_getattr method. Test whether base getattr should be used */
static PyObject *
Per__p_getattr(cPersistentObject *self, PyObject *name)
{
PyObject *result = NULL; /* guilty until proved innocent */
char *s;
name = convert_name(name);
if (!name)
goto Done;
s = PyString_AS_STRING(name);
if (*s != '_' || unghost_getattr(s))
{
if (unghostify(self) < 0)
goto Done;
accessed(self);
result = Py_False;
}
else
result = Py_True;
Py_INCREF(result);
Done:
Py_XDECREF(name);
return result;
}
/*
TODO: we should probably not allow assignment of __class__ and __dict__.
*/
static int
Per_setattro(cPersistentObject *self, PyObject *name, PyObject *v)
{
int result = -1; /* guilty until proved innocent */
char *s;
name = convert_name(name);
if (!name)
goto Done;
s = PyString_AS_STRING(name);
if (strncmp(s, "_p_", 3) != 0)
{
if (unghostify(self) < 0)
goto Done;
accessed(self);
if (strncmp(s, "_v_", 3) != 0
&& self->state != cPersistent_CHANGED_STATE)
{
if (changed(self) < 0)
goto Done;
}
}
result = PyObject_GenericSetAttr((PyObject *)self, name, v);
Done:
Py_XDECREF(name);
return result;
}
static int
Per_p_set_or_delattro(cPersistentObject *self, PyObject *name, PyObject *v)
{
int result = -1; /* guilty until proved innocent */
char *s;
name = convert_name(name);
if (!name)
goto Done;
s = PyString_AS_STRING(name);
if (strncmp(s, "_p_", 3))
{
if (unghostify(self) < 0)
goto Done;
accessed(self);
result = 0;
}
else
{
if (PyObject_GenericSetAttr((PyObject *)self, name, v) < 0)
goto Done;
result = 1;
}
Done:
Py_XDECREF(name);
return result;
}
static PyObject *
Per__p_setattr(cPersistentObject *self, PyObject *args)
{
PyObject *name, *v, *result;
int r;
if (!PyArg_ParseTuple(args, "OO:_p_setattr", &name, &v))
return NULL;
r = Per_p_set_or_delattro(self, name, v);
if (r < 0)
return NULL;
result = r ? Py_True : Py_False;
Py_INCREF(result);
return result;
}
static PyObject *
Per__p_delattr(cPersistentObject *self, PyObject *name)
{
int r;
PyObject *result;
r = Per_p_set_or_delattro(self, name, NULL);
if (r < 0)
return NULL;
result = r ? Py_True : Py_False;
Py_INCREF(result);
return result;
}
static PyObject *
Per_get_changed(cPersistentObject *self)
{
if (self->state < 0)
{
Py_INCREF(Py_None);
return Py_None;
}
return PyBool_FromLong(self->state == cPersistent_CHANGED_STATE);
}
static int
Per_set_changed(cPersistentObject *self, PyObject *v)
{
int deactivate = 0;
int true;
if (!v)
{
/* delattr is used to invalidate an object even if it has changed. */
if (self->state != cPersistent_GHOST_STATE)
self->state = cPersistent_UPTODATE_STATE;
deactivate = 1;
}
else if (v == Py_None)
deactivate = 1;
if (deactivate)
{
PyObject *res, *meth;
meth = PyObject_GetAttr((PyObject *)self, py__p_deactivate);
if (meth == NULL)
return -1;
res = PyObject_CallObject(meth, NULL);
if (res)
Py_DECREF(res);
else
{
/* an error occurred in _p_deactivate().
It's not clear what we should do here. The code is
obviously ignoring the exception, but it shouldn't return
0 for a getattr and set an exception. The simplest change
is to clear the exception, but that simply masks the
error.
This prints an error to stderr just like exceptions in
__del__(). It would probably be better to log it but that
would be painful from C.
*/
PyErr_WriteUnraisable(meth);
}
Py_DECREF(meth);
return 0;
}
/* !deactivate. If passed a true argument, mark self as changed (starting
* with ZODB 3.6, that includes activating the object if it's a ghost).
* If passed a false argument, and the object isn't a ghost, set the
* state as up-to-date.
*/
true = PyObject_IsTrue(v);
if (true == -1)
return -1;
if (true)
{
if (self->state < 0)
{
if (unghostify(self) < 0)
return -1;
}
return changed(self);
}
/* We were passed a false, non-None argument. If we're not a ghost,
* mark self as up-to-date.
*/
if (self->state >= 0)
self->state = cPersistent_UPTODATE_STATE;
return 0;
}
static PyObject *
Per_get_oid(cPersistentObject *self)
{
PyObject *oid = self->oid ? self->oid : Py_None;
Py_INCREF(oid);
return oid;
}
static int
Per_set_oid(cPersistentObject *self, PyObject *v)
{
if (self->cache)
{
int result;
if (v == NULL)
{
PyErr_SetString(PyExc_ValueError,
"can't delete _p_oid of cached object");
return -1;
}
if (PyObject_Cmp(self->oid, v, &result) < 0)
return -1;
if (result)
{
PyErr_SetString(PyExc_ValueError,
"can not change _p_oid of cached object");
return -1;
}
}
Py_XDECREF(self->oid);
Py_XINCREF(v);
self->oid = v;
return 0;
}
static PyObject *
Per_get_jar(cPersistentObject *self)
{
PyObject *jar = self->jar ? self->jar : Py_None;
Py_INCREF(jar);
return jar;
}
static int
Per_set_jar(cPersistentObject *self, PyObject *v)
{
if (self->cache)
{
int result;
if (v == NULL)
{
PyErr_SetString(PyExc_ValueError,
"can't delete _p_jar of cached object");
return -1;
}
if (PyObject_Cmp(self->jar, v, &result) < 0)
return -1;
if (result)
{
PyErr_SetString(PyExc_ValueError,
"can not change _p_jar of cached object");
return -1;
}
}
Py_XDECREF(self->jar);
Py_XINCREF(v);
self->jar = v;
return 0;
}
static PyObject *
Per_get_serial(cPersistentObject *self)
{
return PyString_FromStringAndSize(self->serial, 8);
}
static int
Per_set_serial(cPersistentObject *self, PyObject *v)
{
if (v)
{
if (PyString_Check(v) && PyString_GET_SIZE(v) == 8)
memcpy(self->serial, PyString_AS_STRING(v), 8);
else
{
PyErr_SetString(PyExc_ValueError,
"_p_serial must be an 8-character string");
return -1;
}
}
else
memset(self->serial, 0, 8);
return 0;
}
static PyObject *
Per_get_mtime(cPersistentObject *self)
{
PyObject *t, *v;
if (unghostify(self) < 0)
return NULL;
accessed(self);
if (memcmp(self->serial, "\0\0\0\0\0\0\0\0", 8) == 0)
{
Py_INCREF(Py_None);
return Py_None;
}
t = PyObject_CallFunction(TimeStamp, "s#", self->serial, 8);
if (!t)
return NULL;
v = PyObject_CallMethod(t, "timeTime", "");
Py_DECREF(t);
return v;
}
static PyObject *
Per_get_state(cPersistentObject *self)
{
return PyInt_FromLong(self->state);
}
static PyObject *
Per_get_estimated_size(cPersistentObject *self)
{
return PyInt_FromLong(_estimated_size_in_bytes(self->estimated_size));
}
static int
Per_set_estimated_size(cPersistentObject *self, PyObject *v)
{
if (v)
{
if (PyInt_Check(v))
{
long lv = PyInt_AS_LONG(v);
if (lv < 0)
{
PyErr_SetString(PyExc_ValueError,
"_p_estimated_size must not be negative");
return -1;
}
self->estimated_size = _estimated_size_in_24_bits(lv);
}
else
{
PyErr_SetString(PyExc_ValueError,
"_p_estimated_size must be an integer");
return -1;
}
}
else
self->estimated_size = 0;
return 0;
}
static PyGetSetDef Per_getsets[] = {
{"_p_changed", (getter)Per_get_changed, (setter)Per_set_changed},
{"_p_jar", (getter)Per_get_jar, (setter)Per_set_jar},
{"_p_mtime", (getter)Per_get_mtime},
{"_p_oid", (getter)Per_get_oid, (setter)Per_set_oid},
{"_p_serial", (getter)Per_get_serial, (setter)Per_set_serial},
{"_p_state", (getter)Per_get_state},
{"_p_estimated_size",
(getter)Per_get_estimated_size, (setter)Per_set_estimated_size
},
{NULL}
};
static struct PyMethodDef Per_methods[] = {
{"_p_deactivate", (PyCFunction)Per__p_deactivate, METH_NOARGS,
"_p_deactivate() -- Deactivate the object"},
{"_p_activate", (PyCFunction)Per__p_activate, METH_NOARGS,
"_p_activate() -- Activate the object"},
{"_p_invalidate", (PyCFunction)Per__p_invalidate, METH_NOARGS,
"_p_invalidate() -- Invalidate the object"},
{"_p_getattr", (PyCFunction)Per__p_getattr, METH_O,
"_p_getattr(name) -- Test whether the base class must handle the name\n"
"\n"
"The method unghostifies the object, if necessary.\n"
"The method records the object access, if necessary.\n"
"\n"
"This method should be called by subclass __getattribute__\n"
"implementations before doing anything else. If the method\n"
"returns True, then __getattribute__ implementations must delegate\n"
"to the base class, Persistent.\n"
},
{"_p_setattr", (PyCFunction)Per__p_setattr, METH_VARARGS,
"_p_setattr(name, value) -- Save persistent meta data\n"
"\n"
"This method should be called by subclass __setattr__ implementations\n"
"before doing anything else. If it returns true, then the attribute\n"
"was handled by the base class.\n"
"\n"
"The method unghostifies the object, if necessary.\n"
"The method records the object access, if necessary.\n"
},
{"_p_delattr", (PyCFunction)Per__p_delattr, METH_O,
"_p_delattr(name) -- Delete persistent meta data\n"
"\n"
"This method should be called by subclass __delattr__ implementations\n"
"before doing anything else. If it returns true, then the attribute\n"
"was handled by the base class.\n"
"\n"
"The method unghostifies the object, if necessary.\n"
"The method records the object access, if necessary.\n"
},
{"__getstate__", (PyCFunction)Per__getstate__, METH_NOARGS,
pickle___getstate__doc },
{"__setstate__", (PyCFunction)pickle___setstate__, METH_O,
pickle___setstate__doc},
{"__reduce__", (PyCFunction)pickle___reduce__, METH_NOARGS,
pickle___reduce__doc},
{NULL, NULL} /* sentinel */
};
/* This module is compiled as a shared library. Some compilers don't
allow addresses of Python objects defined in other libraries to be
used in static initializers here. The DEFERRED_ADDRESS macro is
used to tag the slots where such addresses appear; the module init
function must fill in the tagged slots at runtime. The argument is
for documentation -- the macro ignores it.
*/
#define DEFERRED_ADDRESS(ADDR) 0
static PyTypeObject Pertype = {
PyObject_HEAD_INIT(DEFERRED_ADDRESS(&PyPersist_MetaType))
0, /* ob_size */
"persistent.Persistent", /* tp_name */
sizeof(cPersistentObject), /* tp_basicsize */
0, /* tp_itemsize */
(destructor)Per_dealloc, /* tp_dealloc */
0, /* tp_print */
0, /* tp_getattr */
0, /* tp_setattr */
0, /* tp_compare */
0, /* tp_repr */
0, /* tp_as_number */
0, /* tp_as_sequence */
0, /* tp_as_mapping */
0, /* tp_hash */
0, /* tp_call */
0, /* tp_str */
(getattrofunc)Per_getattro, /* tp_getattro */
(setattrofunc)Per_setattro, /* tp_setattro */
0, /* tp_as_buffer */
Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE | Py_TPFLAGS_HAVE_GC,
/* tp_flags */
0, /* tp_doc */
(traverseproc)Per_traverse, /* tp_traverse */
0, /* tp_clear */
0, /* tp_richcompare */
0, /* tp_weaklistoffset */
0, /* tp_iter */
0, /* tp_iternext */
Per_methods, /* tp_methods */
0, /* tp_members */
Per_getsets, /* tp_getset */
};
/* End of code for Persistent objects */
/* -------------------------------------------------------- */
typedef int (*intfunctionwithpythonarg)(PyObject*);
/* Load the object's state if necessary and become sticky */
static int
Per_setstate(cPersistentObject *self)
{
if (unghostify(self) < 0)
return -1;
self->state = cPersistent_STICKY_STATE;
return 0;
}
static PyObject *
simple_new(PyObject *self, PyObject *type_object)
{
if (!PyType_Check(type_object))
{
PyErr_SetString(PyExc_TypeError,
"simple_new argument must be a type object.");
return NULL;
}
return PyType_GenericNew((PyTypeObject *)type_object, NULL, NULL);
}
static PyMethodDef cPersistence_methods[] =
{
{"simple_new", simple_new, METH_O,
"Create an object by simply calling a class's __new__ method without "
"arguments."},
{NULL, NULL}
};
static cPersistenceCAPIstruct
truecPersistenceCAPI = {
&Pertype,
(getattrofunc)Per_getattro, /*tp_getattr with object key*/
(setattrofunc)Per_setattro, /*tp_setattr with object key*/
changed,
accessed,
ghostify,
(intfunctionwithpythonarg)Per_setstate,
NULL, /* The percachedel slot is initialized in cPickleCache.c when
the module is loaded. It uses a function in a different
shared library. */
readCurrent
};
void
initcPersistence(void)
{
PyObject *m, *s;
PyObject *copy_reg;
if (init_strings() < 0)
return;
m = Py_InitModule3("cPersistence", cPersistence_methods,
cPersistence_doc_string);
Pertype.ob_type = &PyType_Type;
Pertype.tp_new = PyType_GenericNew;
if (PyType_Ready(&Pertype) < 0)
return;
if (PyModule_AddObject(m, "Persistent", (PyObject *)&Pertype) < 0)
return;
cPersistenceCAPI = &truecPersistenceCAPI;
s = PyCObject_FromVoidPtr(cPersistenceCAPI, NULL);
if (!s)
return;
if (PyModule_AddObject(m, "CAPI", s) < 0)
return;
if (PyModule_AddIntConstant(m, "GHOST", cPersistent_GHOST_STATE) < 0)
return;
if (PyModule_AddIntConstant(m, "UPTODATE", cPersistent_UPTODATE_STATE) < 0)
return;
if (PyModule_AddIntConstant(m, "CHANGED", cPersistent_CHANGED_STATE) < 0)
return;
if (PyModule_AddIntConstant(m, "STICKY", cPersistent_STICKY_STATE) < 0)
return;
py_simple_new = PyObject_GetAttrString(m, "simple_new");
if (!py_simple_new)
return;
copy_reg = PyImport_ImportModule("copy_reg");
if (!copy_reg)
return;
copy_reg_slotnames = PyObject_GetAttrString(copy_reg, "_slotnames");
if (!copy_reg_slotnames)
{
Py_DECREF(copy_reg);
return;
}
__newobj__ = PyObject_GetAttrString(copy_reg, "__newobj__");
if (!__newobj__)
{
Py_DECREF(copy_reg);
return;
}
if (!TimeStamp)
{
m = PyImport_ImportModule("persistent.TimeStamp");
if (!m)
return;
TimeStamp = PyObject_GetAttrString(m, "TimeStamp");
Py_DECREF(m);
/* fall through to immediate return on error */
}
}
/*****************************************************************************
Copyright (c) 2001, 2002 Zope Foundation and Contributors.
All Rights Reserved.
This software is subject to the provisions of the Zope Public License,
Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
FOR A PARTICULAR PURPOSE
****************************************************************************/
#ifndef CPERSISTENCE_H
#define CPERSISTENCE_H
#include "Python.h"
#include "py24compat.h"
#include "ring.h"
#define CACHE_HEAD \
PyObject_HEAD \
CPersistentRing ring_home; \
int non_ghost_count; \
PY_LONG_LONG total_estimated_size;
struct ccobject_head_struct;
typedef struct ccobject_head_struct PerCache;
/* How big is a persistent object?
12 PyGC_Head is two pointers and an int
8 PyObject_HEAD is an int and a pointer
12 jar, oid, cache pointers
8 ring struct
8 serialno
4 state + extra
4 size info
(56) so far
4 dict ptr
4 weaklist ptr
-------------------------
64 only need 62, but obmalloc rounds up to multiple of eight
Even a ghost requires 64 bytes. It's possible to make a persistent
instance with slots and no dict, which changes the storage needed.
*/
#define cPersistent_HEAD \
PyObject_HEAD \
PyObject *jar; \
PyObject *oid; \
PerCache *cache; \
CPersistentRing ring; \
char serial[8]; \
signed state:8; \
unsigned estimated_size:24;
/* We recently added estimated_size. We originally added it as a new
unsigned long field after a signed char state field and a
3-character reserved field. This didn't work because there
are packages in the wild that have their own copies of cPersistence.h
that didn't see the update.
To get around this, we used the reserved space by making
estimated_size a 24-bit bit field in the space occupied by the old
3-character reserved field. To fit in 24 bits, we made the units
of estimated_size 64-character blocks. This allows is to handle up
to a GB. We should never see that, but to be paranoid, we also
truncate sizes greater than 1GB. We also set the minimum size to
64 bytes.
We use the _estimated_size_in_24_bits and _estimated_size_in_bytes
macros both to avoid repetition and to make intent a little clearer.
*/
#define _estimated_size_in_24_bits(I) ((I) > 1073741696 ? 16777215 : (I)/64+1)
#define _estimated_size_in_bytes(I) ((I)*64)
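/* Worked example: an object reported as 1000 bytes is stored as
_estimated_size_in_24_bits(1000) == 1000/64 + 1 == 16 blocks and read back
as _estimated_size_in_bytes(16) == 1024 bytes; anything larger than
1073741696 bytes saturates at the 24-bit maximum of 16777215 blocks. */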
#define cPersistent_GHOST_STATE -1
#define cPersistent_UPTODATE_STATE 0
#define cPersistent_CHANGED_STATE 1
#define cPersistent_STICKY_STATE 2
typedef struct {
cPersistent_HEAD
} cPersistentObject;
typedef void (*percachedelfunc)(PerCache *, PyObject *);
typedef struct {
PyTypeObject *pertype;
getattrofunc getattro;
setattrofunc setattro;
int (*changed)(cPersistentObject*);
void (*accessed)(cPersistentObject*);
void (*ghostify)(cPersistentObject*);
int (*setstate)(PyObject*);
percachedelfunc percachedel;
int (*readCurrent)(cPersistentObject*);
} cPersistenceCAPIstruct;
#define cPersistenceType cPersistenceCAPI->pertype
#ifndef DONT_USE_CPERSISTENCECAPI
static cPersistenceCAPIstruct *cPersistenceCAPI;
#endif
#define cPersistanceModuleName "cPersistence"
#define PER_TypeCheck(O) PyObject_TypeCheck((O), cPersistenceCAPI->pertype)
#define PER_USE_OR_RETURN(O,R) {if((O)->state==cPersistent_GHOST_STATE && cPersistenceCAPI->setstate((PyObject*)(O)) < 0) return (R); else if ((O)->state==cPersistent_UPTODATE_STATE) (O)->state=cPersistent_STICKY_STATE;}
#define PER_CHANGED(O) (cPersistenceCAPI->changed((cPersistentObject*)(O)))
#define PER_READCURRENT(O, E) \
if (cPersistenceCAPI->readCurrent((cPersistentObject*)(O)) < 0) { E; }
#define PER_GHOSTIFY(O) (cPersistenceCAPI->ghostify((cPersistentObject*)(O)))
/* If the object is sticky, make it non-sticky, so that it can be ghostified.
The value is not meaningful
*/
#define PER_ALLOW_DEACTIVATION(O) ((O)->state==cPersistent_STICKY_STATE && ((O)->state=cPersistent_UPTODATE_STATE))
#define PER_PREVENT_DEACTIVATION(O) ((O)->state==cPersistent_UPTODATE_STATE && ((O)->state=cPersistent_STICKY_STATE))
/*
Make a persistent object usable from C by:
- Making sure it is not a ghost
- Making it sticky.
IMPORTANT: If you call this and don't call PER_ALLOW_DEACTIVATION,
your object will not be ghostified.
PER_USE returns 1 on success and 0 on failure, where failure means
error.
*/
#define PER_USE(O) \
(((O)->state != cPersistent_GHOST_STATE \
|| (cPersistenceCAPI->setstate((PyObject*)(O)) >= 0)) \
? (((O)->state==cPersistent_UPTODATE_STATE) \
? ((O)->state=cPersistent_STICKY_STATE) : 1) : 0)
#define PER_ACCESSED(O) (cPersistenceCAPI->accessed((cPersistentObject*)(O)))
#endif
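/* Illustrative sketch only (a hypothetical helper, not part of this header):
   the usage pattern described above for touching a persistent object from C. */
static int
example_touch(cPersistentObject *obj)
{
    if (!PER_USE(obj))           /* load the state if ghost; make it sticky */
        return -1;               /* 0 means an error was raised */
    /* ... safely read or modify the object's loaded state here ... */
    PER_ALLOW_DEACTIVATION(obj); /* drop stickiness so it can be ghosted */
    PER_ACCESSED(obj);           /* record the access for the cache's ring */
    return 0;
}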
/*****************************************************************************
Copyright (c) 2001, 2002 Zope Foundation and Contributors.
All Rights Reserved.
This software is subject to the provisions of the Zope Public License,
Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
FOR A PARTICULAR PURPOSE
****************************************************************************/
/*
Objects are stored under three different regimes:
Regime 1: Persistent Classes
Persistent Classes are part of ZClasses. They are stored in the
self->data dictionary, and are never garbage collected.
The klass_items() method returns a sequence of (oid,object) tuples for
every Persistent Class, which should make it possible to implement
garbage collection in Python if necessary.
Regime 2: Ghost Objects
There is no benefit to keeping a ghost object which has no external
references, therefore a weak reference scheme is used to ensure that
ghost objects are removed from memory as soon as possible, when the
last external reference is lost.
Ghost objects are stored in the self->data dictionary. Normally a
dictionary keeps a strong reference on its values, however this
reference count is 'stolen'.
This weak reference scheme leaves a dangling reference, in the
dictionary, when the last external reference is lost. To clean up this
dangling reference the persistent object dealloc function calls
self->cache->_oid_unreferenced(self->oid). The cache looks up the oid
in the dictionary, ensures it points to an object whose reference
count is zero, then removes it from the dictionary. Before removing
the object from the dictionary it must temporarily resurrect the
object in much the same way that class instances are resurrected
before their __del__ is called.
Since ghost objects are stored under a different regime to non-ghost
objects, an extra ghostify function in cPersistenceAPI replaces
self->state=GHOST_STATE assignments that were common in other
persistent classes (such as BTrees).
Regime 3: Non-Ghost Objects
Non-ghost objects are stored in two data structures: the dictionary
mapping oids to objects and a doubly-linked list that encodes the
order in which the objects were accessed. The dictionary reference is
borrowed, as it is for ghosts. The list reference is a new reference;
the list stores recently used objects, even if they are otherwise
unreferenced, to avoid loading the object from the database again.
The doubly-linked-list nodes contain next and previous pointers linking
together the cache and all non-ghost persistent objects.
The node embedded in the cache is the home position. On every
attribute access a non-ghost object will relink itself just behind the
home position in the ring. Objects accessed least recently will
eventually find themselves positioned after the home position.
Occasionally other nodes are temporarily inserted in the ring as
position markers. The cache contains a ring_lock flag which must be
set and unset before and after doing so. Only if the flag is unset can
the cache assume that all nodes are either its own home node, or nodes
from persistent objects. This assumption is useful during the garbage
collection process.
The number of non-ghost objects is counted in self->non_ghost_count.
The garbage collection process consists of traversing the ring, and
deactivating (that is, turning into a ghost) every object until
self->non_ghost_count is down to the target size, or until it
reaches the home position again.
Note that objects in the sticky or changed states are still kept in
the ring, however they can not be deactivated. The garbage collection
process must skip such objects, rather than deactivating them.
*/
static char cPickleCache_doc_string[] =
"Defines the PickleCache used by ZODB Connection objects.\n"
"\n"
"$Id$\n";
#define DONT_USE_CPERSISTENCECAPI
#include "cPersistence.h"
#include "structmember.h"
#include <time.h>
#include <stddef.h>
#undef Py_FindMethod
/* Python 2.4 backward compat */
#if PY_MAJOR_VERSION <= 2 && PY_MINOR_VERSION < 5
#define Py_ssize_t int
typedef Py_ssize_t (*lenfunc)(PyObject *);
#endif
/* Python string objects to speed lookups; set by module init. */
static PyObject *py__p_changed;
static PyObject *py__p_deactivate;
static PyObject *py__p_jar;
static PyObject *py__p_oid;
static cPersistenceCAPIstruct *capi;
/* This object is the pickle cache. The CACHE_HEAD macro guarantees
that layout of this struct is the same as the start of
ccobject_head in cPersistence.c */
typedef struct {
CACHE_HEAD
int klass_count; /* count of persistent classes */
PyObject *data; /* oid -> object dict */
PyObject *jar; /* Connection object */
int cache_size; /* target number of items in cache */
PY_LONG_LONG cache_size_bytes; /* target total estimated size of
items in cache */
/* Most of the time the ring contains only:
* many nodes corresponding to persistent objects
* one 'home' node from the cache.
In some cases it is handy to temporarily add other types
of node into the ring as placeholders. 'ring_lock' is a boolean
indicating that someone has already done this. Currently this
is only used by the garbage collection code. */
int ring_lock;
/* 'cache_drain_resistance' controls how quickly the cache size will drop
when it is smaller than the configured size. A value of zero means it will
not drop below the configured size (suitable for most caches). Otherwise,
it will remove cache_non_ghost_count/cache_drain_resistance items from
the cache every time (suitable for rarely used caches, such as those
associated with Zope versions). */
int cache_drain_resistance;
} ccobject;
static int cc_ass_sub(ccobject *self, PyObject *key, PyObject *v);
/* ---------------------------------------------------------------- */
#define OBJECT_FROM_RING(SELF, HERE) \
((cPersistentObject *)(((char *)(HERE)) - offsetof(cPersistentObject, ring)))
/* Insert self into the ring, following after. */
static void
insert_after(CPersistentRing *self, CPersistentRing *after)
{
assert(self != NULL);
assert(after != NULL);
self->r_prev = after;
self->r_next = after->r_next;
after->r_next->r_prev = self;
after->r_next = self;
}
/* Remove self from the ring. */
static void
unlink_from_ring(CPersistentRing *self)
{
assert(self != NULL);
self->r_prev->r_next = self->r_next;
self->r_next->r_prev = self->r_prev;
}
static int
scan_gc_items(ccobject *self, int target, PY_LONG_LONG target_bytes)
{
/* This function must only be called with the ring lock held,
because it places non-object placeholders in the ring.
*/
cPersistentObject *object;
CPersistentRing *here;
CPersistentRing before_original_home;
int result = -1; /* guilty until proved innocent */
/* Scan the ring, from least to most recently used, deactivating
* up-to-date objects, until we either find the ring_home again or
* we've ghosted enough objects to reach the target size.
* Tricky: __getattr__ and __del__ methods can do anything, and in
* particular if we ghostify an object with a __del__ method, that method
* can load the object again, putting it back into the MRU part of the
* ring. Waiting to find ring_home again can thus cause an infinite
* loop (Collector #1208). So before_original_home records the MRU
* position we start with, and we stop the scan when we reach that.
*/
insert_after(&before_original_home, self->ring_home.r_prev);
here = self->ring_home.r_next; /* least recently used object */
while (here != &before_original_home &&
(self->non_ghost_count > target
|| (target_bytes && self->total_estimated_size > target_bytes)
)
)
{
assert(self->ring_lock);
assert(here != &self->ring_home);
/* At this point we know that the ring only contains nodes
from persistent objects, plus our own home node. We know
this because the ring lock is held. We can safely assume
the current ring node is a persistent object now that we know it
is not the home. */
object = OBJECT_FROM_RING(self, here);
if (object->state == cPersistent_UPTODATE_STATE)
{
CPersistentRing placeholder;
PyObject *method;
PyObject *temp;
int error_occurred = 0;
/* deactivate it. This is the main memory saver. */
/* Add a placeholder, a dummy node in the ring. We need
to do this to mark our position in the ring. It is
possible that the PyObject_GetAttr() call below will
invoke a __getattr__() hook in Python. Also possible
that deactivation will lead to a __del__ method call.
So another thread might run, and mutate the ring as a side
effect of object accesses. There's no predicting then where
in the ring here->next will point after that. The
placeholder won't move as a side effect of calling Python
code.
*/
insert_after(&placeholder, here);
method = PyObject_GetAttr((PyObject *)object, py__p_deactivate);
if (method == NULL)
error_occurred = 1;
else
{
temp = PyObject_CallObject(method, NULL);
Py_DECREF(method);
if (temp == NULL)
error_occurred = 1;
else
Py_DECREF(temp);
}
here = placeholder.r_next;
unlink_from_ring(&placeholder);
if (error_occurred)
goto Done;
}
else
here = here->r_next;
}
result = 0;
Done:
unlink_from_ring(&before_original_home);
return result;
}
static PyObject *
lockgc(ccobject *self, int target_size, PY_LONG_LONG target_size_bytes)
{
/* This is thread-safe because of the GIL, and there's nothing
* in between checking the ring_lock and acquiring it that calls back
* into Python.
*/
if (self->ring_lock)
{
Py_INCREF(Py_None);
return Py_None;
}
self->ring_lock = 1;
if (scan_gc_items(self, target_size, target_size_bytes) < 0)
{
self->ring_lock = 0;
return NULL;
}
self->ring_lock = 0;
Py_INCREF(Py_None);
return Py_None;
}
static PyObject *
cc_incrgc(ccobject *self, PyObject *args)
{
int obsolete_arg = -999;
int starting_size = self->non_ghost_count;
int target_size = self->cache_size;
PY_LONG_LONG target_size_bytes = self->cache_size_bytes;
if (self->cache_drain_resistance >= 1)
{
/* This cache will gradually drain down to a small size. Check
a (small) number of objects proportional to the current size */
int target_size_2 = (starting_size - 1
- starting_size / self->cache_drain_resistance);
if (target_size_2 < target_size)
target_size = target_size_2;
}
if (!PyArg_ParseTuple(args, "|i:incrgc", &obsolete_arg))
return NULL;
if (obsolete_arg != -999
&&
(PyErr_Warn(PyExc_DeprecationWarning,
"No argument expected")
< 0))
return NULL;
return lockgc(self, target_size, target_size_bytes);
}
static PyObject *
cc_full_sweep(ccobject *self, PyObject *args)
{
int dt = -999;
/* TODO: This should be deprecated; */
if (!PyArg_ParseTuple(args, "|i:full_sweep", &dt))
return NULL;
if (dt == -999)
return lockgc(self, 0, 0);
else
return cc_incrgc(self, args);
}
static PyObject *
cc_minimize(ccobject *self, PyObject *args)
{
int ignored = -999;
if (!PyArg_ParseTuple(args, "|i:minimize", &ignored))
return NULL;
if (ignored != -999
&&
(PyErr_Warn(PyExc_DeprecationWarning,
"No argument expected")
< 0))
return NULL;
return lockgc(self, 0, 0);
}
static int
_invalidate(ccobject *self, PyObject *key)
{
static PyObject *_p_invalidate = NULL;
PyObject *meth, *v;
v = PyDict_GetItem(self->data, key);
if (v == NULL)
return 0;
if (_p_invalidate == NULL)
{
_p_invalidate = PyString_InternFromString("_p_invalidate");
if (_p_invalidate == NULL)
{
/* It doesn't make any sense to ignore this error, but
the caller ignores all errors.
TODO: and why does it do that? This should be fixed
*/
return -1;
}
}
if (v->ob_refcnt <= 1 && PyType_Check(v))
{
      /* This looks wrong, but it isn't. We use strong references to types
         because they don't have the ring members.
         The result is that we *never* remove classes unless
         they are modified. We can fix this by using weakrefs uniformly.
      */
self->klass_count--;
return PyDict_DelItem(self->data, key);
}
meth = PyObject_GetAttr(v, _p_invalidate);
if (meth == NULL)
return -1;
v = PyObject_CallObject(meth, NULL);
Py_DECREF(meth);
if (v == NULL)
return -1;
Py_DECREF(v);
return 0;
}
static PyObject *
cc_invalidate(ccobject *self, PyObject *inv)
{
PyObject *key, *v;
Py_ssize_t i = 0;
if (PyDict_Check(inv))
{
while (PyDict_Next(inv, &i, &key, &v))
{
if (_invalidate(self, key) < 0)
return NULL;
}
PyDict_Clear(inv);
}
else
{
if (PyString_Check(inv))
{
if (_invalidate(self, inv) < 0)
return NULL;
}
else
{
int l, r;
l = PyObject_Length(inv);
if (l < 0)
return NULL;
for (i=l; --i >= 0; )
{
key = PySequence_GetItem(inv, i);
if (!key)
return NULL;
r = _invalidate(self, key);
Py_DECREF(key);
if (r < 0)
return NULL;
}
/* Dubious: modifying the input may be an unexpected side effect. */
PySequence_DelSlice(inv, 0, l);
}
}
Py_INCREF(Py_None);
return Py_None;
}
static PyObject *
cc_get(ccobject *self, PyObject *args)
{
PyObject *r, *key, *d = NULL;
if (!PyArg_ParseTuple(args, "O|O:get", &key, &d))
return NULL;
r = PyDict_GetItem(self->data, key);
if (!r)
{
if (d)
r = d;
else
r = Py_None;
}
Py_INCREF(r);
return r;
}
static PyObject *
cc_items(ccobject *self)
{
return PyObject_CallMethod(self->data, "items", "");
}
static PyObject *
cc_klass_items(ccobject *self)
{
PyObject *l,*k,*v;
Py_ssize_t p = 0;
l = PyList_New(0);
if (l == NULL)
return NULL;
while (PyDict_Next(self->data, &p, &k, &v))
{
if(PyType_Check(v))
{
v = Py_BuildValue("OO", k, v);
if (v == NULL)
{
Py_DECREF(l);
return NULL;
}
if (PyList_Append(l, v) < 0)
{
Py_DECREF(v);
Py_DECREF(l);
return NULL;
}
Py_DECREF(v);
}
}
return l;
}
static PyObject *
cc_debug_info(ccobject *self)
{
PyObject *l,*k,*v;
Py_ssize_t p = 0;
l = PyList_New(0);
if (l == NULL)
return NULL;
while (PyDict_Next(self->data, &p, &k, &v))
{
if (v->ob_refcnt <= 0)
v = Py_BuildValue("Oi", k, v->ob_refcnt);
else if (! PyType_Check(v) &&
(v->ob_type->tp_basicsize >= sizeof(cPersistentObject))
)
v = Py_BuildValue("Oisi",
k, v->ob_refcnt, v->ob_type->tp_name,
((cPersistentObject*)v)->state);
else
v = Py_BuildValue("Ois", k, v->ob_refcnt, v->ob_type->tp_name);
if (v == NULL)
goto err;
if (PyList_Append(l, v) < 0)
goto err;
}
return l;
err:
Py_DECREF(l);
return NULL;
}
static PyObject *
cc_lru_items(ccobject *self)
{
PyObject *l;
CPersistentRing *here;
if (self->ring_lock)
{
      /* When the ring lock is held, we have no way of knowing which
         ring nodes belong to persistent objects and which are
         placeholders. */
PyErr_SetString(PyExc_ValueError,
".lru_items() is unavailable during garbage collection");
return NULL;
}
l = PyList_New(0);
if (l == NULL)
return NULL;
here = self->ring_home.r_next;
while (here != &self->ring_home)
{
PyObject *v;
cPersistentObject *object = OBJECT_FROM_RING(self, here);
if (object == NULL)
{
Py_DECREF(l);
return NULL;
}
v = Py_BuildValue("OO", object->oid, object);
if (v == NULL)
{
Py_DECREF(l);
return NULL;
}
if (PyList_Append(l, v) < 0)
{
Py_DECREF(v);
Py_DECREF(l);
return NULL;
}
Py_DECREF(v);
here = here->r_next;
}
return l;
}
static void
cc_oid_unreferenced(ccobject *self, PyObject *oid)
{
/* This is called by the persistent object deallocation function
when the reference count on a persistent object reaches
zero. We need to fix up our dictionary; its reference is now
dangling because we stole its reference count. Be careful to
not release the global interpreter lock until this is
complete. */
PyObject *v;
/* If the cache has been cleared by GC, data will be NULL. */
if (!self->data)
return;
v = PyDict_GetItem(self->data, oid);
assert(v);
assert(v->ob_refcnt == 0);
/* Need to be very hairy here because a dictionary is about
to decref an already deleted object.
*/
#ifdef Py_TRACE_REFS
/* This is called from the deallocation function after the
interpreter has untracked the reference. Track it again.
*/
_Py_NewReference(v);
/* Don't increment total refcount as a result of the
shenanigans played in this function. The _Py_NewReference()
call above creates artificial references to v.
*/
_Py_RefTotal--;
assert(v->ob_type);
#else
Py_INCREF(v);
#endif
assert(v->ob_refcnt == 1);
  /* Increment the refcount again, because delitem is going to
     DECREF it. If its refcount reached zero again, we'd call back to
     the dealloc function that called us.
  */
Py_INCREF(v);
/* TODO: Should we call _Py_ForgetReference() on error exit? */
if (PyDict_DelItem(self->data, oid) < 0)
return;
Py_DECREF((ccobject *)((cPersistentObject *)v)->cache);
((cPersistentObject *)v)->cache = NULL;
assert(v->ob_refcnt == 1);
  /* Undo the temporary resurrection.
     Don't DECREF the object, because this function is called from
     the object's dealloc function. If the refcount reached zero again,
     the dealloc function would be invoked recursively.
  */
_Py_ForgetReference(v);
}
static PyObject *
cc_ringlen(ccobject *self)
{
CPersistentRing *here;
int c = 0;
for (here = self->ring_home.r_next; here != &self->ring_home;
here = here->r_next)
c++;
return PyInt_FromLong(c);
}
static PyObject *
cc_update_object_size_estimation(ccobject *self, PyObject *args)
{
PyObject *oid;
cPersistentObject *v;
unsigned int new_size;
if (!PyArg_ParseTuple(args, "OI:updateObjectSizeEstimation",
&oid, &new_size))
return NULL;
/* Note: reference borrowed */
v = (cPersistentObject *)PyDict_GetItem(self->data, oid);
if (v)
{
      /* We know this object -- update our total_estimated_size.
         We must only update when the object is in the ring.
      */
if (v->ring.r_next)
{
self->total_estimated_size += _estimated_size_in_bytes(
(int)(_estimated_size_in_24_bits(new_size))
- (int)(v->estimated_size)
);
          /* we do this in "Connection" as we need it even when the
             object is not in the cache (or not in the ring)
          */
/* v->estimated_size = new_size; */
}
}
Py_RETURN_NONE;
}
static PyObject*
cc_new_ghost(ccobject *self, PyObject *args)
{
PyObject *tmp, *key, *v;
if (!PyArg_ParseTuple(args, "OO:new_ghost", &key, &v))
return NULL;
/* Sanity check the value given to make sure it is allowed in the cache */
if (PyType_Check(v))
{
      /* It's a persistent class, such as a ZClass. That's ok. */
}
else if (v->ob_type->tp_basicsize < sizeof(cPersistentObject))
{
      /* If it's not an instance of a persistent class (i.e., Python
         classes that derive from persistent.Persistent, BTrees,
         etc.), report an error.
         TODO: checking sizeof() seems a poor test.
      */
PyErr_SetString(PyExc_TypeError,
"Cache values must be persistent objects.");
return NULL;
}
/* Can't access v->oid directly because the object might be a
* persistent class.
*/
tmp = PyObject_GetAttr(v, py__p_oid);
if (tmp == NULL)
return NULL;
Py_DECREF(tmp);
if (tmp != Py_None)
{
PyErr_SetString(PyExc_AssertionError,
"New ghost object must not have an oid");
return NULL;
}
/* useful sanity check, but not strictly an invariant of this class */
tmp = PyObject_GetAttr(v, py__p_jar);
if (tmp == NULL)
return NULL;
Py_DECREF(tmp);
if (tmp != Py_None)
{
PyErr_SetString(PyExc_AssertionError,
"New ghost object must not have a jar");
return NULL;
}
tmp = PyDict_GetItem(self->data, key);
if (tmp)
{
Py_DECREF(tmp);
PyErr_SetString(PyExc_AssertionError,
"The given oid is already in the cache");
return NULL;
}
if (PyType_Check(v))
{
if (PyObject_SetAttr(v, py__p_jar, self->jar) < 0)
return NULL;
if (PyObject_SetAttr(v, py__p_oid, key) < 0)
return NULL;
if (PyDict_SetItem(self->data, key, v) < 0)
return NULL;
PyObject_GC_UnTrack((void *)self->data);
self->klass_count++;
}
else
{
cPersistentObject *p = (cPersistentObject *)v;
if(p->cache != NULL)
{
PyErr_SetString(PyExc_AssertionError, "Already in a cache");
return NULL;
}
if (PyDict_SetItem(self->data, key, v) < 0)
return NULL;
/* the dict should have a borrowed reference */
PyObject_GC_UnTrack((void *)self->data);
Py_DECREF(v);
Py_INCREF(self);
p->cache = (PerCache *)self;
Py_INCREF(self->jar);
p->jar = self->jar;
Py_INCREF(key);
p->oid = key;
p->state = cPersistent_GHOST_STATE;
}
Py_RETURN_NONE;
}
static struct PyMethodDef cc_methods[] = {
{"items", (PyCFunction)cc_items, METH_NOARGS,
"Return list of oid, object pairs for all items in cache."},
{"lru_items", (PyCFunction)cc_lru_items, METH_NOARGS,
"List (oid, object) pairs from the lru list, as 2-tuples."},
{"klass_items", (PyCFunction)cc_klass_items, METH_NOARGS,
"List (oid, object) pairs of cached persistent classes."},
{"full_sweep", (PyCFunction)cc_full_sweep, METH_VARARGS,
"full_sweep() -- Perform a full sweep of the cache."},
{"minimize", (PyCFunction)cc_minimize, METH_VARARGS,
"minimize([ignored]) -- Remove as many objects as possible\n\n"
"Ghostify all objects that are not modified. Takes an optional\n"
"argument, but ignores it."},
{"incrgc", (PyCFunction)cc_incrgc, METH_VARARGS,
"incrgc() -- Perform incremental garbage collection\n\n"
"This method had been depricated!"
"Some other implementations support an optional parameter 'n' which\n"
"indicates a repetition count; this value is ignored."},
{"invalidate", (PyCFunction)cc_invalidate, METH_O,
"invalidate(oids) -- invalidate one, many, or all ids"},
{"get", (PyCFunction)cc_get, METH_VARARGS,
"get(key [, default]) -- get an item, or a default"},
{"ringlen", (PyCFunction)cc_ringlen, METH_NOARGS,
"ringlen() -- Returns number of non-ghost items in cache."},
{"debug_info", (PyCFunction)cc_debug_info, METH_NOARGS,
"debug_info() -- Returns debugging data about objects in the cache."},
{"update_object_size_estimation",
(PyCFunction)cc_update_object_size_estimation,
METH_VARARGS,
"update_object_size_estimation(oid, new_size) -- "
"update the caches size estimation for *oid* "
"(if this is known to the cache)."},
{"new_ghost", (PyCFunction)cc_new_ghost, METH_VARARGS,
"new_ghost() -- Initialize a ghost and add it to the cache."},
{NULL, NULL} /* sentinel */
};
static int
cc_init(ccobject *self, PyObject *args, PyObject *kwds)
{
int cache_size = 100;
PY_LONG_LONG cache_size_bytes = 0;
PyObject *jar;
if (!PyArg_ParseTuple(args, "O|iL", &jar, &cache_size, &cache_size_bytes))
return -1;
self->jar = NULL;
self->data = PyDict_New();
if (self->data == NULL)
{
Py_DECREF(self);
return -1;
}
/* Untrack the dict mapping oids to objects.
The dict contains uncounted references to ghost objects, so it
isn't safe for GC to visit it. If GC finds an object with more
     referents than refcounts, it will die with an assertion failure.
When the cache participates in GC, it will need to traverse the
objects in the doubly-linked list, which will account for all the
non-ghost objects.
*/
PyObject_GC_UnTrack((void *)self->data);
self->jar = jar;
Py_INCREF(jar);
self->cache_size = cache_size;
self->cache_size_bytes = cache_size_bytes;
self->non_ghost_count = 0;
self->total_estimated_size = 0;
self->klass_count = 0;
self->cache_drain_resistance = 0;
self->ring_lock = 0;
self->ring_home.r_next = &self->ring_home;
self->ring_home.r_prev = &self->ring_home;
return 0;
}
static void
cc_dealloc(ccobject *self)
{
Py_XDECREF(self->data);
Py_XDECREF(self->jar);
PyObject_GC_Del(self);
}
static int
cc_clear(ccobject *self)
{
Py_ssize_t pos = 0;
PyObject *k, *v;
/* Clearing the cache is delicate.
A non-ghost object will show up in the ring and in the dict. If
     we deallocate the dict before clearing the ring, the GC will
decref each object in the dict. Since the dict references are
uncounted, this will lead to objects having negative refcounts.
Freeing the non-ghost objects should eliminate many objects from
the cache, but there may still be ghost objects left. It's
not safe to decref the dict until it's empty, so we need to manually
clear those out of the dict, too. We accomplish that by replacing
all the ghost objects with None.
*/
/* We don't need to lock the ring, because the cache is unreachable.
It should be impossible for anyone to be modifying the cache.
*/
assert(! self->ring_lock);
while (self->ring_home.r_next != &self->ring_home)
{
CPersistentRing *here = self->ring_home.r_next;
cPersistentObject *o = OBJECT_FROM_RING(self, here);
if (o->cache)
{
Py_INCREF(o); /* account for uncounted reference */
if (PyDict_DelItem(self->data, o->oid) < 0)
return -1;
}
o->cache = NULL;
Py_DECREF(self);
self->ring_home.r_next = here->r_next;
o->ring.r_prev = NULL;
o->ring.r_next = NULL;
Py_DECREF(o);
here = here->r_next;
}
Py_XDECREF(self->jar);
while (PyDict_Next(self->data, &pos, &k, &v))
{
Py_INCREF(v);
if (PyDict_SetItem(self->data, k, Py_None) < 0)
return -1;
}
Py_XDECREF(self->data);
self->data = NULL;
self->jar = NULL;
return 0;
}
static int
cc_traverse(ccobject *self, visitproc visit, void *arg)
{
int err;
CPersistentRing *here;
/* If we're in the midst of cleaning up old objects, the ring contains
* assorted junk we must not pass on to the visit() callback. This
* should be rare (our cleanup code would need to have called back
* into Python, which in turn triggered Python's gc). When it happens,
* simply don't chase any pointers. The cache will appear to be a
* source of external references then, and at worst we miss cleaning
* up a dead cycle until the next time Python's gc runs.
*/
if (self->ring_lock)
return 0;
#define VISIT(SLOT) \
if (SLOT) { \
err = visit((PyObject *)(SLOT), arg); \
if (err) \
return err; \
}
VISIT(self->jar);
here = self->ring_home.r_next;
/* It is possible that an object is traversed after it is cleared.
In that case, there is no ring.
*/
if (!here)
return 0;
while (here != &self->ring_home)
{
cPersistentObject *o = OBJECT_FROM_RING(self, here);
VISIT(o);
here = here->r_next;
}
#undef VISIT
return 0;
}
static Py_ssize_t
cc_length(ccobject *self)
{
return PyObject_Length(self->data);
}
static PyObject *
cc_subscript(ccobject *self, PyObject *key)
{
PyObject *r;
r = PyDict_GetItem(self->data, key);
if (r == NULL)
{
PyErr_SetObject(PyExc_KeyError, key);
return NULL;
}
Py_INCREF(r);
return r;
}
static int
cc_add_item(ccobject *self, PyObject *key, PyObject *v)
{
int result;
PyObject *oid, *object_again, *jar;
cPersistentObject *p;
/* Sanity check the value given to make sure it is allowed in the cache */
if (PyType_Check(v))
{
      /* It's a persistent class, such as a ZClass. That's ok. */
}
else if (v->ob_type->tp_basicsize < sizeof(cPersistentObject))
{
      /* If it's not an instance of a persistent class (i.e., Python
         classes that derive from persistent.Persistent, BTrees,
         etc.), report an error.
         TODO: checking sizeof() seems a poor test.
      */
PyErr_SetString(PyExc_TypeError,
"Cache values must be persistent objects.");
return -1;
}
/* Can't access v->oid directly because the object might be a
* persistent class.
*/
oid = PyObject_GetAttr(v, py__p_oid);
if (oid == NULL)
return -1;
if (! PyString_Check(oid))
{
Py_DECREF(oid);
PyErr_Format(PyExc_TypeError,
"Cached object oid must be a string, not a %s",
oid->ob_type->tp_name);
return -1;
}
/* we know they are both strings.
* now check if they are the same string.
*/
result = PyObject_Compare(key, oid);
if (PyErr_Occurred())
{
Py_DECREF(oid);
return -1;
}
Py_DECREF(oid);
if (result)
{
PyErr_SetString(PyExc_ValueError, "Cache key does not match oid");
return -1;
}
/* useful sanity check, but not strictly an invariant of this class */
jar = PyObject_GetAttr(v, py__p_jar);
if (jar == NULL)
return -1;
if (jar==Py_None)
{
Py_DECREF(jar);
PyErr_SetString(PyExc_ValueError,
"Cached object jar missing");
return -1;
}
Py_DECREF(jar);
object_again = PyDict_GetItem(self->data, key);
if (object_again)
{
if (object_again != v)
{
PyErr_SetString(PyExc_ValueError,
"A different object already has the same oid");
return -1;
}
else
{
/* re-register under the same oid - no work needed */
return 0;
}
}
if (PyType_Check(v))
{
if (PyDict_SetItem(self->data, key, v) < 0)
return -1;
PyObject_GC_UnTrack((void *)self->data);
self->klass_count++;
return 0;
}
else
{
PerCache *cache = ((cPersistentObject *)v)->cache;
if (cache)
{
if (cache != (PerCache *)self)
/* This object is already in a different cache. */
PyErr_SetString(PyExc_ValueError,
"Cache values may only be in one cache.");
return -1;
}
/* else:
This object is already one of ours, which is ok. It
would be very strange if someone was trying to register
the same object under a different key.
*/
}
if (PyDict_SetItem(self->data, key, v) < 0)
return -1;
/* the dict should have a borrowed reference */
PyObject_GC_UnTrack((void *)self->data);
Py_DECREF(v);
p = (cPersistentObject *)v;
Py_INCREF(self);
p->cache = (PerCache *)self;
if (p->state >= 0)
{
/* insert this non-ghost object into the ring just
behind the home position. */
self->non_ghost_count++;
ring_add(&self->ring_home, &p->ring);
/* this list should have a new reference to the object */
Py_INCREF(v);
}
return 0;
}
static int
cc_del_item(ccobject *self, PyObject *key)
{
PyObject *v;
cPersistentObject *p;
/* unlink this item from the ring */
v = PyDict_GetItem(self->data, key);
if (v == NULL)
{
PyErr_SetObject(PyExc_KeyError, key);
return -1;
}
if (PyType_Check(v))
{
self->klass_count--;
}
else
{
p = (cPersistentObject *)v;
if (p->state >= 0)
{
self->non_ghost_count--;
ring_del(&p->ring);
/* The DelItem below will account for the reference
held by the list. */
}
else
{
        /* This is a ghost object, so we haven't kept a reference
           count on it. For it to have stayed alive this long,
           someone else must be keeping a reference to
           it. Therefore we need to temporarily give it back a
           reference count before calling DelItem below */
Py_INCREF(v);
}
Py_DECREF((PyObject *)p->cache);
p->cache = NULL;
}
if (PyDict_DelItem(self->data, key) < 0)
{
PyErr_SetString(PyExc_RuntimeError,
"unexpectedly couldn't remove key in cc_ass_sub");
return -1;
}
return 0;
}
static int
cc_ass_sub(ccobject *self, PyObject *key, PyObject *v)
{
if (!PyString_Check(key))
{
PyErr_Format(PyExc_TypeError,
"cPickleCache key must be a string, not a %s",
key->ob_type->tp_name);
return -1;
}
if (v)
return cc_add_item(self, key, v);
else
return cc_del_item(self, key);
}
static PyMappingMethods cc_as_mapping =
{
(lenfunc)cc_length, /*mp_length*/
(binaryfunc)cc_subscript, /*mp_subscript*/
(objobjargproc)cc_ass_sub, /*mp_ass_subscript*/
};
static PyObject *
cc_cache_data(ccobject *self, void *context)
{
return PyDict_Copy(self->data);
}
static PyGetSetDef cc_getsets[] =
{
{"cache_data", (getter)cc_cache_data},
{NULL}
};
static PyMemberDef cc_members[] = {
{"cache_size", T_INT, offsetof(ccobject, cache_size)},
{"cache_size_bytes", T_LONG, offsetof(ccobject, cache_size_bytes)},
{"total_estimated_size", T_LONG, offsetof(ccobject, total_estimated_size),
RO},
{"cache_drain_resistance", T_INT,
offsetof(ccobject, cache_drain_resistance)},
{"cache_non_ghost_count", T_INT, offsetof(ccobject, non_ghost_count), RO},
{"cache_klass_count", T_INT, offsetof(ccobject, klass_count), RO},
{NULL}
};
/* This module is compiled as a shared library. Some compilers don't
allow addresses of Python objects defined in other libraries to be
used in static initializers here. The DEFERRED_ADDRESS macro is
used to tag the slots where such addresses appear; the module init
function must fill in the tagged slots at runtime. The argument is
for documentation -- the macro ignores it.
*/
#define DEFERRED_ADDRESS(ADDR) 0
static PyTypeObject Cctype = {
PyObject_HEAD_INIT(DEFERRED_ADDRESS(&PyType_Type))
0, /* ob_size */
"persistent.PickleCache", /* tp_name */
sizeof(ccobject), /* tp_basicsize */
0, /* tp_itemsize */
(destructor)cc_dealloc, /* tp_dealloc */
0, /* tp_print */
0, /* tp_getattr */
0, /* tp_setattr */
0, /* tp_compare */
0, /* tp_repr */
0, /* tp_as_number */
0, /* tp_as_sequence */
&cc_as_mapping, /* tp_as_mapping */
0, /* tp_hash */
0, /* tp_call */
0, /* tp_str */
0, /* tp_getattro */
0, /* tp_setattro */
0, /* tp_as_buffer */
Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE | Py_TPFLAGS_HAVE_GC,
/* tp_flags */
0, /* tp_doc */
(traverseproc)cc_traverse, /* tp_traverse */
(inquiry)cc_clear, /* tp_clear */
0, /* tp_richcompare */
0, /* tp_weaklistoffset */
0, /* tp_iter */
0, /* tp_iternext */
cc_methods, /* tp_methods */
cc_members, /* tp_members */
cc_getsets, /* tp_getset */
0, /* tp_base */
0, /* tp_dict */
0, /* tp_descr_get */
0, /* tp_descr_set */
0, /* tp_dictoffset */
(initproc)cc_init, /* tp_init */
};
void
initcPickleCache(void)
{
PyObject *m;
Cctype.ob_type = &PyType_Type;
Cctype.tp_new = &PyType_GenericNew;
if (PyType_Ready(&Cctype) < 0)
{
return;
}
m = Py_InitModule3("cPickleCache", NULL, cPickleCache_doc_string);
capi = (cPersistenceCAPIstruct *)PyCObject_Import(
"persistent.cPersistence", "CAPI");
if (!capi)
return;
capi->percachedel = (percachedelfunc)cc_oid_unreferenced;
py__p_changed = PyString_InternFromString("_p_changed");
if (!py__p_changed)
return;
py__p_deactivate = PyString_InternFromString("_p_deactivate");
if (!py__p_deactivate)
return;
py__p_jar = PyString_InternFromString("_p_jar");
if (!py__p_jar)
return;
py__p_oid = PyString_InternFromString("_p_oid");
if (!py__p_oid)
return;
if (PyModule_AddStringConstant(m, "cache_variant", "stiff/c") < 0)
return;
/* This leaks a reference to Cctype, but it doesn't matter. */
if (PyModule_AddObject(m, "PickleCache", (PyObject *)&Cctype) < 0)
return;
}
##############################################################################
#
# Copyright Zope Foundation and Contributors.
# All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
##############################################################################
# persistent.dict is deprecated.  Use persistent.mapping instead.
from persistent.mapping import PersistentMapping as PersistentDict
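# Illustrative sketch (not part of this module): the alias above means both
# names refer to the same class, so existing code keeps working unchanged.
#
#   from persistent.dict import PersistentDict
#   from persistent.mapping import PersistentMapping
#   assert PersistentDict is PersistentMapping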
##############################################################################
#
# Copyright (c) 2001, 2002 Zope Foundation and Contributors.
# All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
##############################################################################
"""Persistence Interfaces
"""
from zope.interface import Interface
from zope.interface import Attribute
# Allowed values for _p_state
try:
from .cPersistence import GHOST
from .cPersistence import UPTODATE
from .cPersistence import CHANGED
from .cPersistence import STICKY
except ImportError:
GHOST = -1
UPTODATE = 0
CHANGED = 1
STICKY = 2
class IPersistent(Interface):
"""Python persistent interface
A persistent object can be in one of several states:
- Unsaved
The object has been created but not saved in a data manager.
In this state, the _p_changed attribute is non-None and false
and the _p_jar attribute is None.
- Saved
The object has been saved and has not been changed since it was saved.
In this state, the _p_changed attribute is non-None and false
and the _p_jar attribute is set to a data manager.
- Sticky
This state is identical to the saved state except that the
object cannot transition to the ghost state. This is a special
state used by C methods of persistent objects to make sure that
state is not unloaded in the middle of computation.
In this state, the _p_changed attribute is non-None and false
and the _p_jar attribute is set to a data manager.
There is no Python API for detecting whether an object is in the
sticky state.
- Changed
The object has been changed.
In this state, the _p_changed attribute is true
and the _p_jar attribute is set to a data manager.
- Ghost
the object is in memory but its state has not been loaded from
the database (or its state has been unloaded). In this state,
the object doesn't contain any application data.
In this state, the _p_changed attribute is None, and the _p_jar
attribute is set to the data manager from which the object was
obtained.
In all the above, _p_oid (the persistent object id) is set when
_p_jar first gets set.
The following state transitions are possible:
- Unsaved -> Saved
This transition occurs when an object is saved in the
database. This usually happens when an unsaved object is added
to (e.g. as an attribute or item of) a saved (or changed) object
and the transaction is committed.
- Saved -> Changed
Sticky -> Changed
Ghost -> Changed
This transition occurs when someone sets an attribute or sets
_p_changed to a true value on a saved, sticky or ghost object. When
the transition occurs, the persistent object is required to call the
register() method on its data manager, passing itself as the
only argument.
Prior to ZODB 3.6, setting _p_changed to a true value on a ghost object
was ignored (the object remained a ghost, and getting its _p_changed
attribute continued to return None).
- Saved -> Sticky
This transition occurs when C code marks the object as sticky to
prevent its deactivation.
- Saved -> Ghost
This transition occurs when a saved object is deactivated or
invalidated. See discussion below.
- Sticky -> Saved
This transition occurs when C code unmarks the object as sticky to
allow its deactivation.
- Changed -> Saved
This transition occurs when a transaction is committed. After
saving the state of a changed object during transaction commit,
the data manager sets the object's _p_changed to a non-None false
value.
- Changed -> Ghost
This transition occurs when a transaction is aborted. All changed
objects are invalidated by the data manager by an abort.
- Ghost -> Saved
This transition occurs when an attribute or operation of a ghost
is accessed and the object's state is loaded from the database.
Note that there is a separate C API that is not included here.
The C API requires a specific data layout and defines the sticky
state.
About Invalidation, Deactivation and the Sticky & Ghost States
The sticky state is intended to be a short-lived state, to prevent
an object's state from being discarded while we're in C routines. It
is an error to invalidate an object in the sticky state.
Deactivation is a request that an object discard its state (become
a ghost). Deactivation is an optimization, and a request to
deactivate may be ignored. There are two equivalent ways to
request deactivation:
- call _p_deactivate()
- set _p_changed to None
There are two ways to invalidate an object: call the
_p_invalidate() method (preferred) or delete its _p_changed
attribute. This cannot be ignored, and is used when semantics
require invalidation. Normally, an invalidated object transitions
to the ghost state. However, some objects cannot be ghosts. When
these objects are invalidated, they immediately reload their state
from their data manager, and are then in the saved state.
"""
_p_jar = Attribute(
"""The data manager for the object.
The data manager implements the IPersistentDataManager interface.
If there is no data manager, then this is None.
Once assigned to a data manager, an object cannot be re-assigned
to another.
""")
_p_oid = Attribute(
"""The object id.
It is up to the data manager to assign this.
The special value None is reserved to indicate that an object
id has not been assigned. Non-None object ids must be non-empty
strings. The 8-byte string '\0'*8 (8 NUL bytes) is reserved to
identify the database root object.
Once assigned an OID, an object cannot be re-assigned another.
""")
_p_changed = Attribute(
"""The persistent state of the object.
This is one of:
None -- The object is a ghost.
false but not None -- The object is saved (or has never been saved).
true -- The object has been modified since it was last saved.
The object state may be changed by assigning or deleting this
attribute; however, assigning None is ignored if the object is
not in the saved state, and may be ignored even if the object is
in the saved state.
At and after ZODB 3.6, setting _p_changed to a true value for a ghost
object activates the object; prior to 3.6, setting _p_changed to a
true value on a ghost object was ignored.
Note that an object can transition to the changed state only if
it has a data manager. When such a state change occurs, the
'register' method of the data manager must be called, passing the
persistent object.
Deleting this attribute forces invalidation independent of
existing state, although it is an error if the sticky state is
current.
""")
_p_serial = Attribute(
"""The object serial number.
    This member is used by the data manager to distinguish distinct
revisions of a given persistent object.
This is an 8-byte string (not Unicode).
""")
_p_mtime = Attribute(
"""The object's modification time (read-only).
This is a float, representing seconds since the epoch (as returned
by time.time).
""")
_p_state = Attribute(
"""The object's persistence state token.
Must be one of GHOST, UPTODATE, CHANGED, or STICKY.
""")
_p_estimated_size = Attribute(
"""An estimate of the object's size in bytes.
May be set by the data manager.
""")
# Attribute access protocol
def __getattribute__(name):
""" Handle activating ghosts before returning an attribute value.
"Special" attributes and '_p_*' attributes don't require activation.
"""
def __setattr__(name, value):
""" Handle activating ghosts before setting an attribute value.
"Special" attributes and '_p_*' attributes don't require activation.
"""
def __delattr__(name):
""" Handle activating ghosts before deleting an attribute value.
"Special" attributes and '_p_*' attributes don't require activation.
"""
# Pickling protocol.
def __getstate__():
"""Get the object data.
The state should not include persistent attributes ("_p_name").
The result must be picklable.
"""
def __setstate__(state):
"""Set the object data.
"""
def __reduce__():
"""Reduce an object to contituent parts for serialization.
"""
# Custom methods
def _p_activate():
"""Activate the object.
Change the object to the saved state if it is a ghost.
"""
def _p_deactivate():
"""Deactivate the object.
Possibly change an object in the saved state to the
ghost state. It may not be possible to make some persistent
objects ghosts, and, for optimization reasons, the implementation
may choose to keep an object in the saved state.
"""
def _p_invalidate():
"""Invalidate the object.
Invalidate the object. This causes any data to be thrown
away, even if the object is in the changed state. The object
is moved to the ghost state; further accesses will cause
object data to be reloaded.
"""
def _p_getattr(name):
"""Test whether the base class must handle the name
The method unghostifies the object, if necessary.
The method records the object access, if necessary.
This method should be called by subclass __getattribute__
implementations before doing anything else. If the method
returns True, then __getattribute__ implementations must delegate
to the base class, Persistent.
"""
def _p_setattr(name, value):
"""Save persistent meta data
This method should be called by subclass __setattr__ implementations
before doing anything else. If it returns true, then the attribute
was handled by the base class.
The method unghostifies the object, if necessary.
The method records the object access, if necessary.
"""
def _p_delattr(name):
"""Delete persistent meta data
This method should be called by subclass __delattr__ implementations
before doing anything else. If it returns true, then the attribute
was handled by the base class.
The method unghostifies the object, if necessary.
The method records the object access, if necessary.
"""
# TODO: document conflict resolution.
class IPersistentDataManager(Interface):
"""Provide services for managing persistent state.
This interface is used by a persistent object to interact with its
data manager in the context of a transaction.
"""
_cache = Attribute("The pickle cache associated with this connection.")
def setstate(object):
"""Load the state for the given object.
The object should be in the ghost state. The object's state will be
set and the object will end up in the saved state.
The object must provide the IPersistent interface.
"""
def oldstate(obj, tid):
"""Return copy of 'obj' that was written by transaction 'tid'.
The returned object does not have the typical metadata (_p_jar, _p_oid,
        _p_serial) set. I'm not sure how references to other persistent objects
are handled.
Parameters
obj: a persistent object from this Connection.
tid: id of a transaction that wrote an earlier revision.
Raises KeyError if tid does not exist or if tid deleted a revision of
obj.
"""
def register(object):
"""Register an IPersistent with the current transaction.
This method must be called when the object transitions to
the changed state.
A subclass could override this method to customize the default
policy of one transaction manager for each thread.
"""
# Maybe later:
## def mtime(object):
## """Return the modification time of the object.
## The modification time may not be known, in which case None
## is returned. If non-None, the return value is the kind of
## timestamp supplied by Python's time.time().
## """
class IPickleCache(Interface):
""" API of the cache for a ZODB connection.
"""
def __getitem__(oid):
""" -> the persistent object for OID.
o Raise KeyError if not found.
"""
def __setitem__(oid, value):
""" Save the persistent object under OID.
o 'oid' must be a string, else raise ValueError.
o Raise KeyError on duplicate
"""
def __delitem__(oid):
""" Remove the persistent object for OID.
o 'oid' must be a string, else raise ValueError.
o Raise KeyError if not found.
"""
def get(oid, default=None):
""" -> the persistent object for OID.
o Return 'default' if not found.
"""
def mru(oid):
""" Move the element corresonding to 'oid' to the head.
o Raise KeyError if no element is found.
"""
def __len__():
""" -> the number of OIDs in the cache.
"""
def items():
"""-> a sequence of tuples (oid, value) for cached objects.
o Only includes items in 'data' (no p-classes).
"""
def ringlen():
""" -> the number of persistent objects in the ring.
o Only includes items in the ring (no ghosts or p-classes).
"""
def lru_items():
""" -> a sequence of tuples (oid, value) for cached objects.
o Tuples will be in LRU order.
o Only includes items in the ring (no ghosts or p-classes).
"""
def klass_items():
"""-> a sequence of tuples (oid, value) for cached p-classes.
o Only includes persistent classes.
"""
def incrgc():
""" Perform an incremental garbage collection sweep.
o Reduce number of non-ghosts to 'cache_size', if possible.
o Ghostify in LRU order.
o Skip dirty or sticky objects.
o Quit once we get down to 'cache_size'.
"""
def full_sweep():
""" Perform a full garbage collection sweep.
o Reduce number of non-ghosts to 0, if possible.
        o Ghostify all non-sticky / non-changed objects.
"""
def minimize():
""" Alias for 'full_sweep'.
o XXX?
"""
def new_ghost(oid, obj):
""" Add the given (ghost) object to the cache.
Also, set its _p_jar and _p_oid, and ensure it is in the
GHOST state.
If the object doesn't define '_p_oid' / '_p_jar', raise.
If the object's '_p_oid' is not None, raise.
If the object's '_p_jar' is not None, raise.
If 'oid' is already in the cache, raise.
"""
def reify(to_reify):
""" Reify the indicated objects.
o If 'to_reify' is a string, treat it as an OID.
o Otherwise, iterate over it as a sequence of OIDs.
o For each OID, if present in 'data' and in GHOST state:
o Call '_p_activate' on the object.
o Add it to the ring.
o If any OID is present but not in GHOST state, skip it.
        o Raise KeyError if any OID is not present.
"""
def invalidate(to_invalidate):
""" Invalidate the indicated objects.
o If 'to_invalidate' is a string, treat it as an OID.
o Otherwise, iterate over it as a sequence of OIDs.
o Any OID corresponding to a p-class will cause the corresponding
p-class to be removed from the cache.
        o For all other OIDs, ghostify the corresponding object and
remove it from the ring.
"""
cache_size = Attribute(u'Target size of the cache')
cache_drain_resistance = Attribute(u'Factor for draining cache below '
u'target size')
cache_non_ghost_count = Attribute(u'Number of non-ghosts in the cache '
u'(XXX how is it different from '
                                      u'ringlen?)')
cache_data = Attribute(u"Property: copy of our 'data' dict")
cache_klass_count = Attribute(u"Property: len of 'persistent_classes'")
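# Illustrative sketch (not executable here, and not part of the API): how a
# data manager typically drives an IPickleCache implementation.  ``jar``,
# ``oid`` and ``obj`` are hypothetical; the pure-Python PickleCache in
# persistent.picklecache implements this interface.
#
#   from persistent.picklecache import PickleCache
#
#   cache = PickleCache(jar, target_size=1000)
#   cache.new_ghost(oid, obj)   # sets obj._p_jar / obj._p_oid, stores a ghost
#   obj = cache[oid]            # mapping access by oid
#   cache.mru(oid)              # record an access; move obj to the MRU end
#   cache.incrgc()              # ghostify LRU objects down to the target size
#   cache.full_sweep()          # ghostify everything not dirty or sticky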
##############################################################################
#
# Copyright (c) 2001, 2002 Zope Foundation and Contributors.
# All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE
#
##############################################################################
"""Python implementation of persistent list.
$Id$"""
import persistent
from UserList import UserList
class PersistentList(UserList, persistent.Persistent):
__super_setitem = UserList.__setitem__
__super_delitem = UserList.__delitem__
__super_setslice = UserList.__setslice__
__super_delslice = UserList.__delslice__
__super_iadd = UserList.__iadd__
__super_imul = UserList.__imul__
__super_append = UserList.append
__super_insert = UserList.insert
__super_pop = UserList.pop
__super_remove = UserList.remove
__super_reverse = UserList.reverse
__super_sort = UserList.sort
__super_extend = UserList.extend
def __setitem__(self, i, item):
self.__super_setitem(i, item)
self._p_changed = 1
def __delitem__(self, i):
self.__super_delitem(i)
self._p_changed = 1
def __setslice__(self, i, j, other):
self.__super_setslice(i, j, other)
self._p_changed = 1
def __delslice__(self, i, j):
self.__super_delslice(i, j)
self._p_changed = 1
def __iadd__(self, other):
L = self.__super_iadd(other)
self._p_changed = 1
return L
def __imul__(self, n):
L = self.__super_imul(n)
self._p_changed = 1
return L
def append(self, item):
self.__super_append(item)
self._p_changed = 1
def insert(self, i, item):
self.__super_insert(i, item)
self._p_changed = 1
def pop(self, i=-1):
rtn = self.__super_pop(i)
self._p_changed = 1
return rtn
def remove(self, item):
self.__super_remove(item)
self._p_changed = 1
def reverse(self):
self.__super_reverse()
self._p_changed = 1
def sort(self, *args, **kwargs):
self.__super_sort(*args, **kwargs)
self._p_changed = 1
def extend(self, other):
self.__super_extend(other)
self._p_changed = 1
# This works around a bug in Python 2.1.x (up to 2.1.2 at least) where the
# __cmp__ bogusly raises a RuntimeError, and because this is an extension
# class, none of the rich comparison stuff works anyway.
def __cmp__(self, other):
return cmp(self.data, self._UserList__cast(other))
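# Illustrative sketch (hypothetical, not part of the API): every mutating
# method above marks the list as changed, so a ZODB data manager will save it
# on commit.
def _example_usage():  # pragma: no cover
    """Show that mutating a PersistentList sets its _p_changed flag."""
    todos = PersistentList(['write docs'])
    todos.append('fix bugs')         # append() sets todos._p_changed = 1
    todos[0] = 'write more docs'     # __setitem__ does likewise
    assert todos._p_changed
    # Mutating an object *stored in* the list does not mark the list itself
    # as changed; only calls on the PersistentList do.
    return todos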
##############################################################################
#
# Copyright (c) 2001, 2002 Zope Foundation and Contributors.
# All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE
#
##############################################################################
"""Python implementation of persistent base types
$Id$"""
import persistent
import UserDict
class default(object):
def __init__(self, func):
self.func = func
def __get__(self, inst, class_):
if inst is None:
return self
return self.func(inst)
class PersistentMapping(UserDict.IterableUserDict, persistent.Persistent):
"""A persistent wrapper for mapping objects.
This class allows wrapping of mapping objects so that object
changes are registered. As a side effect, mapping objects may be
subclassed.
A subclass of PersistentMapping or any code that adds new
attributes should not create an attribute named _container. This
is reserved for backwards compatibility reasons.
"""
    # UserDict provides all of the mapping behavior. The
    # PersistentMapping class is responsible for marking the persistent
    # state as changed when a method actually changes the state. As
    # the mapping API evolves, we may need to add more methods here.
__super_delitem = UserDict.IterableUserDict.__delitem__
__super_setitem = UserDict.IterableUserDict.__setitem__
__super_clear = UserDict.IterableUserDict.clear
__super_update = UserDict.IterableUserDict.update
__super_setdefault = UserDict.IterableUserDict.setdefault
__super_pop = UserDict.IterableUserDict.pop
__super_popitem = UserDict.IterableUserDict.popitem
def __delitem__(self, key):
self.__super_delitem(key)
self._p_changed = 1
def __setitem__(self, key, v):
self.__super_setitem(key, v)
self._p_changed = 1
def clear(self):
self.__super_clear()
self._p_changed = 1
def update(self, b):
self.__super_update(b)
self._p_changed = 1
def setdefault(self, key, failobj=None):
# We could inline all of UserDict's implementation into the
# method here, but I'd rather not depend at all on the
# implementation in UserDict (simple as it is).
if not self.has_key(key):
self._p_changed = 1
return self.__super_setdefault(key, failobj)
def pop(self, key, *args):
self._p_changed = 1
return self.__super_pop(key, *args)
def popitem(self):
self._p_changed = 1
return self.__super_popitem()
# Old implementations used _container rather than data.
# Use a descriptor to provide data when we have _container instead
@default
def data(self):
        # We don't want to cause a write on read, so we're careful not to
        # do anything that would cause us to become marked as changed.
        # However, if we're modified, then the saved record will have data,
        # not _container.
data = self.__dict__.pop('_container')
self.__dict__['data'] = data
return data
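# Illustrative sketch (hypothetical, not part of the API): mutating methods
# mark the mapping as changed; plain reads do not.
def _example_usage():  # pragma: no cover
    """Show which PersistentMapping operations set _p_changed."""
    prefs = PersistentMapping()
    prefs['theme'] = 'dark'          # __setitem__ sets prefs._p_changed = 1
    prefs.setdefault('lang', 'en')   # marks changed only when the key is missing
    value = prefs.get('theme')       # reading alone does not mark it changed
    assert prefs._p_changed and value == 'dark'
    return prefs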
##############################################################################
#
# Copyright (c) 2009 Zope Corporation and Contributors.
# All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
##############################################################################
import gc
import weakref
from zope.interface import implements
from persistent.interfaces import CHANGED
from persistent.interfaces import GHOST
from persistent.interfaces import IPickleCache
from persistent.interfaces import STICKY
class RingNode(object):
# 32 byte fixed size wrapper.
__slots__ = ('object', 'next', 'prev')
def __init__(self, object, next=None, prev=None):
self.object = object
self.next = next
self.prev = prev
class PickleCache(object):
implements(IPickleCache)
def __init__(self, jar, target_size=0, cache_size_bytes=0):
# TODO: forward-port Dieter's bytes stuff
self.jar = jar
self.target_size = target_size
self.drain_resistance = 0
self.non_ghost_count = 0
self.persistent_classes = {}
self.data = weakref.WeakValueDictionary()
self.ring = RingNode(None)
self.ring.next = self.ring.prev = self.ring
# IPickleCache API
def __len__(self):
""" See IPickleCache.
"""
return (len(self.persistent_classes) +
len(self.data))
def __getitem__(self, oid):
""" See IPickleCache.
"""
value = self.data.get(oid)
if value is not None:
return value
return self.persistent_classes[oid]
def __setitem__(self, oid, value):
""" See IPickleCache.
"""
if not isinstance(oid, str): # XXX bytes
raise ValueError('OID must be string: %s' % oid)
# XXX
if oid in self.persistent_classes or oid in self.data:
raise KeyError('Duplicate OID: %s' % oid)
if type(value) is type:
self.persistent_classes[oid] = value
else:
self.data[oid] = value
if value._p_state != GHOST:
self.non_ghost_count += 1
mru = self.ring.prev
self.ring.prev = node = RingNode(value, self.ring, mru)
mru.next = node
def __delitem__(self, oid):
""" See IPickleCache.
"""
if not isinstance(oid, str):
raise ValueError('OID must be string: %s' % oid)
if oid in self.persistent_classes:
del self.persistent_classes[oid]
else:
value = self.data.pop(oid)
node = self.ring.next
if node is None:
return
while node is not self.ring:
if node.object is value:
node.prev.next, node.next.prev = node.next, node.prev
self.non_ghost_count -= 1
break
node = node.next
def get(self, oid, default=None):
""" See IPickleCache.
"""
value = self.data.get(oid, self)
if value is not self:
return value
return self.persistent_classes.get(oid, default)
def mru(self, oid):
""" See IPickleCache.
"""
node = self.ring.next
while node is not self.ring and node.object._p_oid != oid:
node = node.next
if node is self.ring:
value = self.data[oid]
if value._p_state != GHOST:
self.non_ghost_count += 1
mru = self.ring.prev
self.ring.prev = node = RingNode(value, self.ring, mru)
mru.next = node
else:
# remove from old location
node.prev.next, node.next.prev = node.next, node.prev
# splice into new
self.ring.prev.next, node.prev = node, self.ring.prev
self.ring.prev, node.next = node, self.ring
def ringlen(self):
""" See IPickleCache.
"""
result = 0
node = self.ring.next
while node is not self.ring:
result += 1
node = node.next
return result
def items(self):
""" See IPickleCache.
"""
return self.data.items()
def lru_items(self):
""" See IPickleCache.
"""
result = []
node = self.ring.next
while node is not self.ring:
result.append((node.object._p_oid, node.object))
node = node.next
return result
def klass_items(self):
""" See IPickleCache.
"""
return self.persistent_classes.items()
def incrgc(self, ignored=None):
""" See IPickleCache.
"""
target = self.target_size
if self.drain_resistance >= 1:
size = self.non_ghost_count
target2 = size - 1 - (size / self.drain_resistance)
if target2 < target:
target = target2
self._sweep(target)
def full_sweep(self, target=None):
""" See IPickleCache.
"""
self._sweep(0)
minimize = full_sweep
def new_ghost(self, oid, obj):
""" See IPickleCache.
"""
if obj._p_oid is not None:
raise ValueError('Object already has oid')
if obj._p_jar is not None:
raise ValueError('Object already has jar')
if oid in self.persistent_classes or oid in self.data:
raise KeyError('Duplicate OID: %s' % oid)
obj._p_oid = oid
obj._p_jar = self.jar
if type(obj) is not type:
if obj._p_state != GHOST:
obj._p_invalidate()
self[oid] = obj
def reify(self, to_reify):
""" See IPickleCache.
"""
if isinstance(to_reify, str): #bytes
to_reify = [to_reify]
for oid in to_reify:
value = self[oid]
if value._p_state == GHOST:
value._p_activate()
self.non_ghost_count += 1
mru = self.ring.prev
self.ring.prev = node = RingNode(value, self.ring, mru)
mru.next = node
def invalidate(self, to_invalidate):
""" See IPickleCache.
"""
if isinstance(to_invalidate, str):
self._invalidate(to_invalidate)
else:
for oid in to_invalidate:
self._invalidate(oid)
def debug_info(self):
result = []
for oid, klass in self.persistent_classes.items():
result.append((oid,
len(gc.getreferents(klass)),
type(klass).__name__,
klass._p_state,
))
for oid, value in self.data.items():
result.append((oid,
len(gc.getreferents(value)),
type(value).__name__,
value._p_state,
))
return result
cache_size = property(lambda self: self.target_size)
cache_drain_resistance = property(lambda self: self.drain_resistance)
cache_non_ghost_count = property(lambda self: self.non_ghost_count)
cache_data = property(lambda self: dict(self.data.items()))
cache_klass_count = property(lambda self: len(self.persistent_classes))
# Helpers
def _sweep(self, target):
# lock
node = self.ring.next
while node is not self.ring and self.non_ghost_count > target:
if node.object._p_state not in (STICKY, CHANGED):
node.prev.next, node.next.prev = node.next, node.prev
node.object = None
self.non_ghost_count -= 1
node = node.next
def _invalidate(self, oid):
value = self.data.get(oid)
if value is not None and value._p_state != GHOST:
value._p_invalidate()
            node = self.ring.next
            while node is not self.ring:
                if node.object is value:
                    node.prev.next, node.next.prev = node.next, node.prev
                    break
                node = node.next  # advance, or the loop would never terminate
elif oid in self.persistent_classes:
del self.persistent_classes[oid]
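# Illustrative sketch (hypothetical, not part of the API): the LRU ring used
# above is a circular doubly-linked list of RingNodes whose distinguished home
# node is ``self.ring``.  This shows the splice idiom used by __setitem__,
# mru() and _sweep().
def _example_ring_splice():  # pragma: no cover
    """Link one RingNode just before the home node, then unlink it again."""
    home = RingNode(None)
    home.next = home.prev = home
    mru = home.prev                                  # most recently used node
    home.prev = node = RingNode('obj', home, mru)    # new node just before home
    mru.next = node
    assert home.next is node and node.next is home
    node.prev.next, node.next.prev = node.next, node.prev   # unlink, as _sweep does
    assert home.next is home and home.prev is home
    return home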
/* Backport type definitions from Python 2.5's object.h */
#ifndef PERSISTENT_PY24COMPAT_H
#define PERSISTENT_PY24COMPAT_H
#if PY_VERSION_HEX < 0x02050000
typedef int Py_ssize_t;
#define PY_SSIZE_T_MAX INT_MAX
#define PY_SSIZE_T_MIN INT_MIN
#endif /* PY_VERSION_HEX */
#endif /* PERSISTENT_PY24COMPAT_H */
##############################################################################
#
# Copyright (c) 2011 Zope Foundation and Contributors.
# All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
##############################################################################
from copy_reg import __newobj__
import struct
import sys
from zope.interface import implements
from persistent.interfaces import IPersistent
from persistent.interfaces import IPersistentDataManager
from persistent.interfaces import GHOST
from persistent.interfaces import UPTODATE
from persistent.interfaces import CHANGED
from persistent.interfaces import STICKY
from persistent.timestamp import TimeStamp
if sys.version_info < (2, 6,):
OID_TYPE = SERIAL_TYPE = str
else:
OID_TYPE = SERIAL_TYPE = bytes
def _makeOctets(s):
if sys.version_info < (2, 6,):
return str(s)
if sys.version_info < (3,):
return bytes(s)
return bytes(s, 'ascii')
_INITIAL_SERIAL = _makeOctets('\x00' * 8)
# Bitwise flags
_CHANGED = 0x0001
_STICKY = 0x0002
_OGA = object.__getattribute__
_OSA = object.__setattr__
# These names can be used from a ghost without causing it to be activated.
SPECIAL_NAMES = ('__class__',
'__del__',
'__dict__',
'__of__',
'__setstate__'
)
_SCONV = 60.0 / (1<<16) / (1<<16)
def makeTimestamp(year, month, day, hour, minute, second):
a = (((year - 1900) * 12 + month - 1) * 31 + day - 1)
a = (a * 24 + hour) * 60 + minute
b = int(second / _SCONV)
return struct.pack('>II', a, b)
def parseTimestamp(octets):
a, b = struct.unpack('>II', octets)
minute = a % 60
hour = a // 60 % 24
day = a // (60 * 24) % 31 + 1
month = a // (60 * 24 * 31) % 12 + 1
year = a // (60 * 24 * 31 * 12) + 1900
second = b * _SCONV
return (year, month, day, hour, minute, second)
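# Illustrative sketch (hypothetical, not part of the API): the two helpers
# above are inverse packings of an 8-octet (4 + 4 byte) timestamp.
def _example_timestamp_roundtrip():  # pragma: no cover
    """Pack a date/time into 8 octets and unpack it again."""
    raw = makeTimestamp(2011, 2, 16, 14, 37, 22.5)
    assert len(raw) == 8
    year, month, day, hour, minute, second = parseTimestamp(raw)
    # -> (2011, 2, 16, 14, 37, ~22.5); ``second`` is recovered only to the
    # resolution of _SCONV (about 60 / 2**32 seconds).
    return year, month, day, hour, minute, second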
class Persistent(object):
""" Pure Python implmentation of Persistent base class
"""
__slots__ = ('__jar', '__oid', '__serial', '__flags')
implements(IPersistent)
def __new__(cls):
inst = super(Persistent, cls).__new__(cls)
inst.__jar = inst.__oid = inst.__serial = None
inst.__flags = None
return inst
# _p_jar: see IPersistent.
def _get_jar(self):
return self.__jar
def _set_jar(self, value):
if value is self.__jar:
return
if self.__jar is not None:
raise ValueError('Already assigned a data manager')
if not IPersistentDataManager.providedBy(value):
raise ValueError('Not a data manager: %s' % value)
self.__jar = value
_p_jar = property(_get_jar, _set_jar)
# _p_oid: see IPersistent.
def _get_oid(self):
return self.__oid
def _set_oid(self, value):
if value == self.__oid:
return
if value is not None:
if not isinstance(value, OID_TYPE):
raise ValueError('Invalid OID type: %s' % value)
if self.__jar is not None and self.__oid is not None:
raise ValueError('Already assigned an OID by our jar')
self.__oid = value
def _del_oid(self):
if self.__jar is not None:
raise ValueError('Cannot delete OID once assigned to a jar')
self.__oid = None
_p_oid = property(_get_oid, _set_oid, _del_oid)
# _p_serial: see IPersistent.
def _get_serial(self):
if self.__serial is not None:
return self.__serial
return _INITIAL_SERIAL
def _set_serial(self, value):
if value is not None:
if not isinstance(value, SERIAL_TYPE):
raise ValueError('Invalid SERIAL type: %s' % value)
if len(value) != 8:
raise ValueError('SERIAL must be 8 octets')
self.__serial = value
def _del_serial(self):
self.__serial = None
_p_serial = property(_get_serial, _set_serial, _del_serial)
# _p_changed: see IPersistent.
def _get_changed(self):
if self.__flags is None: # ghost
return None
return self.__flags & _CHANGED
def _set_changed(self, value):
if self.__flags is None:
if value is not None:
self._p_activate()
self._p_set_changed_flag(value)
else:
if value is None: # -> ghost
self._p_deactivate()
else:
self._p_set_changed_flag(value)
def _del_changed(self):
self._p_invalidate()
_p_changed = property(_get_changed, _set_changed, _del_changed)
# _p_mtime
def _get_mtime(self):
if self.__serial is not None:
ts = TimeStamp(self.__serial)
return ts.timeTime()
_p_mtime = property(_get_mtime)
# _p_state
def _get_state(self):
if self.__flags is None:
if self.__jar is None:
return UPTODATE
return GHOST
if self.__flags & _CHANGED:
if self.__jar is None:
return UPTODATE
result = CHANGED
else:
result = UPTODATE
if self.__flags & _STICKY:
return STICKY
return result
_p_state = property(_get_state)
# _p_estimated_size: XXX don't want to reserve the space?
def _get_estimated_size(self):
return 0
def _set_estimated_size(self, value):
pass
_p_estimated_size = property(_get_estimated_size, _set_estimated_size)
# The '_p_sticky' property is not (yet) part of the API: for now,
# it exists to simplify debugging and testing assertions.
def _get_sticky(self):
if self.__flags is None:
return False
return self.__flags & _STICKY
def _set_sticky(self, value):
if self.__flags is None:
raise ValueError('Ghost')
if value:
self.__flags |= _STICKY
else:
self.__flags &= ~_STICKY
_p_sticky = property(_get_sticky, _set_sticky)
# The '_p_status' property is not (yet) part of the API: for now,
# it exists to simplify debugging and testing assertions.
def _get_status(self):
if self.__flags is None:
if self.__jar is None:
return 'new'
return 'ghost'
if self.__flags & _CHANGED:
if self.__jar is None:
return 'unsaved'
result = 'changed'
else:
result = 'saved'
if self.__flags & _STICKY:
return '%s (sticky)' % result
return result
_p_status = property(_get_status)
# Methods from IPersistent.
def __getattribute__(self, name):
""" See IPersistent.
"""
if (not name.startswith('_Persistent__') and
not name.startswith('_p_') and
name not in SPECIAL_NAMES):
if _OGA(self, '_Persistent__flags') is None:
_OGA(self, '_p_activate')()
_OGA(self, '_p_accessed')()
return _OGA(self, name)
def __setattr__(self, name, value):
special_name = (name.startswith('_Persistent__') or
name.startswith('_p_'))
if not special_name:
if _OGA(self, '_Persistent__flags') is None:
_OGA(self, '_p_activate')()
_OGA(self, '_p_accessed')()
_OSA(self, name, value)
if not special_name:
before = _OGA(self, '_Persistent__flags')
after = before | _CHANGED
if before != after:
_OSA(self, '_Persistent__flags', after)
if (_OGA(self, '_Persistent__jar') is not None and
_OGA(self, '_Persistent__oid') is not None):
_OGA(self, '_p_register')()
def __delattr__(self, name):
special_name = (name.startswith('_Persistent__') or
name.startswith('_p_'))
if not special_name:
if _OGA(self, '_Persistent__flags') is None:
_OGA(self, '_p_activate')()
_OGA(self, '_p_accessed')()
object.__delattr__(self, name)
if not special_name:
before = _OGA(self, '_Persistent__flags')
after = before | _CHANGED
if before != after:
_OSA(self, '_Persistent__flags', after)
if (_OGA(self, '_Persistent__jar') is not None and
_OGA(self, '_Persistent__oid') is not None):
_OGA(self, '_p_register')()
def __getstate__(self):
""" See IPersistent.
"""
idict = getattr(self, '__dict__', None)
if idict is not None:
return dict([x for x in idict.items()
if not x[0].startswith('_p_') and
not x[0].startswith('_v_')])
return ()
def __setstate__(self, state):
""" See IPersistent.
"""
idict = getattr(self, '__dict__', None)
if idict is not None:
idict.clear()
idict.update(state)
else:
if state != ():
raise ValueError('No state allowed on base Persistent class')
def __reduce__(self):
""" See IPersistent.
"""
gna = getattr(self, '__getnewargs__', lambda: ())
return (__newobj__, (type(self),) + gna(), self.__getstate__())
def _p_activate(self):
""" See IPersistent.
"""
if self.__flags is None:
self.__flags = 0
if self.__jar is not None and self.__oid is not None:
self.__jar.setstate(self)
def _p_deactivate(self):
""" See IPersistent.
"""
if self.__flags is not None and not self.__flags & _CHANGED:
self._p_invalidate()
def _p_invalidate(self):
""" See IPersistent.
"""
if self.__flags is not None and self.__flags & _STICKY:
raise ValueError('Sticky')
self.__flags = None
idict = getattr(self, '__dict__', None)
if idict is not None:
idict.clear()
def _p_getattr(self, name):
""" See IPersistent.
"""
if name.startswith('_p_') or name in SPECIAL_NAMES:
return True
self._p_activate()
self._p_accessed()
return False
def _p_setattr(self, name, value):
""" See IPersistent.
"""
if name.startswith('_p_'):
setattr(self, name, value)
return True
self._p_activate()
self._p_accessed()
return False
def _p_delattr(self, name):
""" See IPersistent.
"""
if name.startswith('_p_'):
delattr(self, name)
return True
self._p_activate()
self._p_accessed()
return False
# Helper methods: not APIs: we name them with '_p_' to bypass
# the __getattribute__ bit which bumps the cache.
def _p_register(self):
if self.__jar is not None and self.__oid is not None:
self.__jar.register(self)
def _p_set_changed_flag(self, value):
if value:
before = self.__flags
self.__flags |= _CHANGED
if before != self.__flags:
self._p_register()
else:
self.__flags &= ~_CHANGED
def _p_accessed(self):
# Notify the jar's pickle cache that we have been accessed.
# This relies on what has been (until now) an implementation
# detail, the '_cache' attribute of the jar. We made it a
# private API to avoid the cycle of keeping a reference to
# the cache on the persistent object.
if self.__jar is not None and self.__oid is not None:
self.__jar._cache.mru(self.__oid)
/*****************************************************************************
Copyright (c) 2003 Zope Foundation and Contributors.
All Rights Reserved.
This software is subject to the provisions of the Zope Public License,
Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
FOR A PARTICULAR PURPOSE
****************************************************************************/
#define RING_C "$Id$\n"
/* Support routines for the doubly-linked list of cached objects.
The cache stores a doubly-linked list of persistent objects, with
space for the pointers allocated in the objects themselves. The cache
stores the distinguished head of the list, which is not a valid
persistent object.
The next pointers traverse the ring in order starting with the least
recently used object. The prev pointers traverse the ring in order
starting with the most recently used object.
*/
#include "Python.h"
#include "ring.h"
void
ring_add(CPersistentRing *ring, CPersistentRing *elt)
{
assert(!elt->r_next);
elt->r_next = ring;
elt->r_prev = ring->r_prev;
ring->r_prev->r_next = elt;
ring->r_prev = elt;
}
void
ring_del(CPersistentRing *elt)
{
elt->r_next->r_prev = elt->r_prev;
elt->r_prev->r_next = elt->r_next;
elt->r_next = NULL;
elt->r_prev = NULL;
}
void
ring_move_to_head(CPersistentRing *ring, CPersistentRing *elt)
{
elt->r_prev->r_next = elt->r_next;
elt->r_next->r_prev = elt->r_prev;
elt->r_next = ring;
elt->r_prev = ring->r_prev;
ring->r_prev->r_next = elt;
ring->r_prev = elt;
}
/*****************************************************************************
Copyright (c) 2003 Zope Foundation and Contributors.
All Rights Reserved.
This software is subject to the provisions of the Zope Public License,
Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
FOR A PARTICULAR PURPOSE
****************************************************************************/
/* Support routines for the doubly-linked list of cached objects.
The cache stores a headed, doubly-linked, circular list of persistent
objects, with space for the pointers allocated in the objects themselves.
The cache stores the distinguished head of the list, which is not a valid
persistent object. The other list members are non-ghost persistent
objects, linked in LRU (least-recently used) order.
The r_next pointers traverse the ring starting with the least recently used
object. The r_prev pointers traverse the ring starting with the most
recently used object.
Obscure: While each object is pointed at twice by list pointers (once by
its predecessor's r_next, again by its successor's r_prev), the refcount
on the object is bumped only by 1. This leads to some possibly surprising
sequences of incref and decref code. Note that since the refcount is
bumped at least once, the list does hold a strong reference to each
object in it.
*/
typedef struct CPersistentRing_struct
{
struct CPersistentRing_struct *r_prev;
struct CPersistentRing_struct *r_next;
} CPersistentRing;
/* The list operations here take constant time independent of the
* number of objects in the list:
*/
/* Add elt as the most recently used object. elt must not already be
* in the list, although this isn't checked.
*/
void ring_add(CPersistentRing *ring, CPersistentRing *elt);
/* Remove elt from the list. elt must already be in the list, although
* this isn't checked.
*/
void ring_del(CPersistentRing *elt);
/* elt must already be in the list, although this isn't checked. It's
* unlinked from its current position, and relinked into the list as the
* most recently used object (which is arguably the tail of the list
* instead of the head -- but the name of this function could be argued
* either way). This is equivalent to
*
* ring_del(elt);
* ring_add(ring, elt);
*
* but may be a little quicker.
*/
void ring_move_to_head(CPersistentRing *ring, CPersistentRing *elt);
Tests for `persistent.Persistent`
=================================
This document is an extended doc test that covers the basics of the
Persistent base class. The test expects a class named `P` to be
provided in its globals. The `P` class implements the `Persistent`
interface.
Test framework
--------------
The class `P` needs to behave like `ExampleP`. (Note that the code below
is *not* part of the tests.)
::
class ExampleP(Persistent):
def __init__(self):
self.x = 0
def inc(self):
self.x += 1
The tests use stub data managers. A data manager is responsible for
loading and storing the state of a persistent object. It's stored in
the ``_p_jar`` attribute of a persistent object.
>>> class DM:
... def __init__(self):
... self.called = 0
... def register(self, ob):
... self.called += 1
... def setstate(self, ob):
... ob.__setstate__({'x': 42})
>>> class BrokenDM(DM):
... def register(self,ob):
... self.called += 1
... raise NotImplementedError
... def setstate(self,ob):
... raise NotImplementedError
>>> from persistent import Persistent
Test Persistent without Data Manager
------------------------------------
First do some simple tests of a Persistent instance that does not have
a data manager (``_p_jar``).
>>> p = P()
>>> p.x
0
>>> p._p_changed
False
>>> p._p_state
0
>>> p._p_jar
>>> p._p_oid
Verify that modifications have no effect on ``_p_state`` or ``_p_changed``.
>>> p.inc()
>>> p.inc()
>>> p.x
2
>>> p._p_changed
False
>>> p._p_state
0
Try all sorts of different ways to change the object's state.
>>> p._p_deactivate()
>>> p._p_state
0
>>> p._p_changed = True
>>> p._p_state
0
>>> del p._p_changed
>>> p._p_changed
False
>>> p._p_state
0
>>> p.x
2
We can store a size estimation in ``_p_estimated_size``. Its default is 0.
The size estimation can be used by a cache associated with the data manager
to help in the implementation of its replacement strategy or its size bounds.
Of course, the estimated size must not be negative.
>>> p._p_estimated_size
0
>>> p._p_estimated_size = 1000
>>> p._p_estimated_size
1024
Huh? Why is the estimated size coming out different than what we put
in? The reason is that the size isn't stored exactly. For backward
compatibility reasons, the size needs to fit in 24 bits, so,
internally, it is adjusted somewhat.
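To make the adjustment concrete, here is a minimal sketch (not the actual
implementation) of a 24-bit encoding with the behavior shown above: the
estimate is kept in units of 64 bytes, so 1000 rounds up to 1024. ::

    def _round_estimated_size(value):
        # Hypothetical sketch: keep the estimate in 64-byte units so that
        # even very large objects fit into a 24-bit field.
        if value < 0:
            raise ValueError('_p_estimated_size must not be negative')
        units = min(value // 64 + 1, 0xFFFFFF)   # clamp to 24 bits
        return units * 64                        # 1000 -> 1024

Attempting to store a negative estimate is rejected: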
>>> p._p_estimated_size = -1
Traceback (most recent call last):
...
ValueError: _p_estimated_size must not be negative
Test Persistent with Data Manager
---------------------------------
Next try some tests of an object with a data manager. The `DM` class is
a simple testing stub.
>>> p = P()
>>> dm = DM()
>>> p._p_oid = "00000012"
>>> p._p_jar = dm
>>> p._p_changed
0
>>> dm.called
0
Modifying the object marks it as changed and registers it with the data
manager. Subsequent modifications don't have additional side-effects.
>>> p.inc()
>>> p._p_changed
1
>>> dm.called
1
>>> p.inc()
>>> p._p_changed
1
>>> dm.called
1
It's not possible to deactivate a modified object.
>>> p._p_deactivate()
>>> p._p_changed
1
It is possible to invalidate it. That's the key difference between
deactivation and invalidation.
>>> p._p_invalidate()
>>> p._p_state
-1
Now that the object is a ghost, any attempt to modify it will require that it
be unghosted first. The test data manager has the odd property that it sets
the object's ``x`` attribute to ``42`` when it is unghosted.
>>> p.inc()
>>> p.x
43
>>> dm.called
2
You can manually reset the changed field to ``False``, although it's not clear
why you would want to do that. The object changes to the ``UPTODATE`` state
but retains its modifications.
>>> p._p_changed = False
>>> p._p_state
0
>>> p._p_changed
False
>>> p.x
43
>>> p.inc()
>>> p._p_changed
True
>>> dm.called
3
``__getstate__()`` and ``__setstate__()``
-----------------------------------------
The next several tests cover the ``__getstate__()`` and ``__setstate__()``
implementations.
>>> p = P()
>>> state = p.__getstate__()
>>> isinstance(state, dict)
True
>>> state['x']
0
>>> p._p_state
0
Calling setstate always leaves the object in the uptodate state?
(I'm not entirely clear on this one.)
>>> p.__setstate__({'x': 5})
>>> p._p_state
0
Assigning to a volatile attribute has no effect on the object state.
>>> p._v_foo = 2
>>> p.__getstate__()
{'x': 5}
>>> p._p_state
0
The ``_p_serial`` attribute is not affected by calling setstate.
>>> p._p_serial = "00000012"
>>> p.__setstate__(p.__getstate__())
>>> p._p_serial
'00000012'
Change Ghost test
-----------------
If an object is a ghost and its ``_p_changed`` is set to ``True`` (any true
value), it should activate (unghostify) the object. This behavior is new in
ZODB 3.6; before then, an attempt to do ``ghost._p_changed = True`` was
ignored.
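A rough sketch of the property logic being exercised here (hypothetical
rendering; the real setter is defined on ``Persistent`` itself)::

    def _set_changed(self, value):
        if self.__flags is None:                # ghost
            if value:
                self._p_activate()              # unghostify first (new in ZODB 3.6)
                self._p_set_changed_flag(True)
        elif value is None:                     # request to ghostify
            self._p_deactivate()
        else:
            self._p_set_changed_flag(value)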
>>> p = P()
>>> p._p_jar = DM()
>>> p._p_oid = 1
>>> p._p_deactivate()
>>> p._p_changed # None
>>> p._p_state # ghost state
-1
>>> p._p_changed = True
>>> p._p_changed
1
>>> p._p_state # changed state
1
>>> p.x
42
Activate, deactivate, and invalidate
------------------------------------
Some of these tests are redundant, but are included to make sure there
are explicit and simple tests of ``_p_activate()``, ``_p_deactivate()``, and
``_p_invalidate()``.
>>> p = P()
>>> p._p_oid = 1
>>> p._p_jar = DM()
>>> p._p_deactivate()
>>> p._p_state
-1
>>> p._p_activate()
>>> p._p_state
0
>>> p.x
42
>>> p.inc()
>>> p.x
43
>>> p._p_state
1
>>> p._p_invalidate()
>>> p._p_state
-1
>>> p.x
42
Test failures
-------------
The following tests cover various error cases.
When an object is modified, it registers with its data manager. If that
registration fails, the exception is propagated and the object stays in the
up-to-date state. It shouldn't change to the modified state, because it won't
be saved when the transaction commits.
>>> p = P()
>>> p._p_oid = 1
>>> p._p_jar = BrokenDM()
>>> p._p_state
0
>>> p._p_jar.called
0
>>> p._p_changed = 1
Traceback (most recent call last):
...
NotImplementedError
>>> p._p_jar.called
1
>>> p._p_state
0
Make sure that exceptions that occur inside the data manager's ``setstate()``
method propagate out to the caller.
>>> p = P()
>>> p._p_oid = 1
>>> p._p_jar = BrokenDM()
>>> p._p_deactivate()
>>> p._p_state
-1
>>> p._p_activate()
Traceback (most recent call last):
...
NotImplementedError
>>> p._p_state
-1
Special test to cover layout of ``__dict__``
--------------------------------------------
We once had a bug in the `Persistent` class that calculated an incorrect
offset for the ``__dict__`` attribute. It assigned ``__dict__`` and
``_p_jar`` to the same location in memory. This is a simple test to make sure
they have different locations.
>>> p = P()
>>> p.inc()
>>> p.inc()
>>> 'x' in p.__dict__
True
>>> p._p_jar
Inheritance and metaclasses
---------------------------
Simple tests to make sure it's possible to inherit from the `Persistent` base
class multiple times. There used to be metaclasses involved in `Persistent`
that probably made this a more interesting test.
>>> class A(Persistent):
... pass
>>> class B(Persistent):
... pass
>>> class C(A, B):
... pass
>>> class D(object):
... pass
>>> class E(D, B):
... pass
>>> a = A()
>>> b = B()
>>> c = C()
>>> d = D()
>>> e = E()
Also make sure that it's possible to define `Persistent` classes that have a
custom metaclass.
>>> class alternateMeta(type):
... type
>>> class alternate(object):
... __metaclass__ = alternateMeta
>>> class mixedMeta(alternateMeta, type):
... pass
>>> class mixed(alternate, Persistent):
... pass
>>> class mixed(Persistent, alternate):
... pass
Basic type structure
--------------------
>>> Persistent.__dictoffset__
0
>>> Persistent.__weakrefoffset__
0
>>> Persistent.__basicsize__ > object.__basicsize__
True
>>> P.__dictoffset__ > 0
True
>>> P.__weakrefoffset__ > 0
True
>>> P.__dictoffset__ < P.__weakrefoffset__
True
>>> P.__basicsize__ > Persistent.__basicsize__
True
Slots
-----
These are some simple tests of classes that have a ``__slots__``
attribute. Some of the classes should have slots, others shouldn't.
>>> class noDict(object):
... __slots__ = ['foo']
>>> class p_noDict(Persistent):
... __slots__ = ['foo']
>>> class p_shouldHaveDict(p_noDict):
... pass
>>> p_noDict.__dictoffset__
0
>>> x = p_noDict()
>>> x.foo = 1
>>> x.foo
1
>>> x.bar = 1
Traceback (most recent call last):
...
AttributeError: 'p_noDict' object has no attribute 'bar'
>>> x._v_bar = 1
Traceback (most recent call last):
...
AttributeError: 'p_noDict' object has no attribute '_v_bar'
>>> x.__dict__
Traceback (most recent call last):
...
AttributeError: 'p_noDict' object has no attribute '__dict__'
The various _p_ attributes are unaffected by slots.
>>> p._p_oid
>>> p._p_jar
>>> p._p_state
0
If the most-derived class does not specify ``__slots__``, its instances get a ``__dict__``:
>>> p_shouldHaveDict.__dictoffset__ > 0
True
>>> x = p_shouldHaveDict()
>>> isinstance(x.__dict__, dict)
True
Pickling
--------
There's actually a substantial effort involved in making subclasses of
`Persistent` work with plain-old pickle. The ZODB serialization layer never
calls pickle on an object; it pickles the object's class description and its
state as two separate pickles.
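For illustration only, a serializer following that scheme might look roughly
like this (hypothetical helper, not ZODB's actual code)::

    import pickle
    from io import BytesIO

    def dump_record(obj):
        # Sketch: write the class description and the instance state as two
        # consecutive pickles into a single byte string.
        f = BytesIO()
        p = pickle.Pickler(f, 1)
        p.dump((type(obj).__module__, type(obj).__name__))  # class description
        p.dump(obj.__getstate__())                           # object state
        return f.getvalue()

Plain pickle, by contrast, round-trips the whole object in one step: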
>>> import pickle
>>> p = P()
>>> p.inc()
>>> p2 = pickle.loads(pickle.dumps(p))
>>> p2.__class__ is P
True
>>> p2.x == p.x
True
We should also test that pickle works with custom ``__getstate__`` and
``__setstate__`` implementations, and perhaps even with ``__reduce__``. The
problem is that pickling depends on finding the class in a particular module,
and classes defined here won't appear in any module. We could require each
user of the tests to define a base class, but that might be tedious.
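One workaround, used by a unit test elsewhere in this package, is to bind the
class at module scope just before pickling, for example::

    import pickle
    from persistent import Persistent

    Picklable = None    # module-level placeholder, rebound below

    def roundtrip_example():
        # Sketch: rebinding the module-level name makes the class reachable
        # as <this module>.Picklable, so pickle can locate it by name.
        global Picklable
        class Picklable(Persistent):
            pass
        obj = Picklable()
        obj.attr = 'test'
        return pickle.loads(pickle.dumps(obj))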
Interfaces
----------
Some versions of Zope and ZODB have the `zope.interface` package available.
If it is available, then persistent will be associated with several
interfaces. It's hard to write a doctest test that runs the tests only if
`zope.interface` is available, so this test looks a little unusual. One
problem is that the assert statements won't do anything if you run with `-O`.
>>> try:
... import zope.interface
... except ImportError:
... pass
... else:
... from persistent.interfaces import IPersistent
... assert IPersistent.implementedBy(Persistent)
... p = Persistent()
... assert IPersistent.providedBy(p)
... assert IPersistent.implementedBy(P)
... p = P()
... assert IPersistent.providedBy(p)
#############################################################################
#
# Copyright (c) 2003 Zope Foundation and Contributors.
# All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
##############################################################################
import unittest
Picklable = None # avoid global import of Persistent; updated later
class PersistenceTest(unittest.TestCase):
def _makeOne(self):
from persistent import Persistent
class P(Persistent):
pass
return P()
def _makeJar(self):
from persistent.tests.utils import ResettingJar
return ResettingJar()
def test_oid_initial_value(self):
obj = self._makeOne()
self.assertEqual(obj._p_oid, None)
def test_oid_mutable_and_deletable_when_no_jar(self):
OID = '\x01' * 8
obj = self._makeOne()
obj._p_oid = OID
self.assertEqual(obj._p_oid, OID)
del obj._p_oid
self.assertEqual(obj._p_oid, None)
def test_oid_immutable_when_in_jar(self):
OID = '\x01' * 8
obj = self._makeOne()
jar = self._makeJar()
jar.add(obj)
# Can't change oid of cache object.
def deloid():
del obj._p_oid
self.assertRaises(ValueError, deloid)
def setoid():
obj._p_oid = OID
self.assertRaises(ValueError, setoid)
# The value returned for _p_changed can be one of:
# 0 -- it is not changed
# 1 -- it is changed
# None -- it is a ghost
def test_change_via_setattr(self):
from persistent import CHANGED
obj = self._makeOne()
jar = self._makeJar()
jar.add(obj)
obj.x = 1
self.assertEqual(obj._p_changed, 1)
self.assertEqual(obj._p_state, CHANGED)
self.assert_(obj in jar.registered)
def test_setattr_then_mark_uptodate(self):
from persistent import UPTODATE
obj = self._makeOne()
jar = self._makeJar()
jar.add(obj)
obj.x = 1
obj._p_changed = 0
self.assertEqual(obj._p_changed, 0)
self.assertEqual(obj._p_state, UPTODATE)
def test_set_changed_directly(self):
from persistent import CHANGED
obj = self._makeOne()
jar = self._makeJar()
jar.add(obj)
obj._p_changed = 1
self.assertEqual(obj._p_changed, 1)
self.assertEqual(obj._p_state, CHANGED)
self.assert_(obj in jar.registered)
def test_cant_ghostify_if_changed(self):
from persistent import CHANGED
obj = self._makeOne()
jar = self._makeJar()
jar.add(obj)
# setting obj._p_changed to None ghostifies if the
# object is in the up-to-date state, but not otherwise.
obj.x = 1
obj._p_changed = None
self.assertEqual(obj._p_changed, 1)
self.assertEqual(obj._p_state, CHANGED)
def test_can_ghostify_if_uptodate(self):
from persistent import GHOST
obj = self._makeOne()
jar = self._makeJar()
jar.add(obj)
obj.x = 1
obj._p_changed = 0
obj._p_changed = None
self.assertEqual(obj._p_changed, None)
self.assertEqual(obj._p_state, GHOST)
def test_can_ghostify_if_changed_but_del__p_changed(self):
from persistent import GHOST
obj = self._makeOne()
jar = self._makeJar()
jar.add(obj)
# You can transition directly from modified to ghost if
# you delete the _p_changed attribute.
obj.x = 1
del obj._p_changed
self.assertEqual(obj._p_changed, None)
self.assertEqual(obj._p_state, GHOST)
def test__p_state_immutable(self):
from persistent import CHANGED
from persistent import GHOST
from persistent import STICKY
from persistent import UPTODATE
# make sure we can't write to _p_state; we don't want yet
# another way to change state!
obj = self._makeOne()
def setstate(value):
obj._p_state = value
self.assertRaises(Exception, setstate, GHOST)
self.assertRaises(Exception, setstate, UPTODATE)
self.assertRaises(Exception, setstate, CHANGED)
self.assertRaises(Exception, setstate, STICKY)
def test_invalidate(self):
from persistent import GHOST
from persistent import UPTODATE
obj = self._makeOne()
jar = self._makeJar()
jar.add(obj)
obj._p_activate()
self.assertEqual(obj._p_changed, 0)
self.assertEqual(obj._p_state, UPTODATE)
obj._p_invalidate()
self.assertEqual(obj._p_changed, None)
self.assertEqual(obj._p_state, GHOST)
def test_invalidate_activate_invalidate(self):
from persistent import GHOST
obj = self._makeOne()
jar = self._makeJar()
jar.add(obj)
obj._p_invalidate()
obj._p_activate()
obj.x = 1
obj._p_invalidate()
self.assertEqual(obj._p_changed, None)
self.assertEqual(obj._p_state, GHOST)
def test_initial_serial(self):
NOSERIAL = "\000" * 8
obj = self._makeOne()
self.assertEqual(obj._p_serial, NOSERIAL)
def test_setting_serial_w_invalid_types_raises(self):
# Serial must be an 8-byte string
obj = self._makeOne()
def set(val):
obj._p_serial = val
self.assertRaises(ValueError, set, 1)
self.assertRaises(ValueError, set, "0123")
self.assertRaises(ValueError, set, "012345678")
self.assertRaises(ValueError, set, u"01234567")
def test_del_serial_returns_to_initial(self):
NOSERIAL = "\000" * 8
obj = self._makeOne()
obj._p_serial = "01234567"
del obj._p_serial
self.assertEqual(obj._p_serial, NOSERIAL)
def test_initial_mtime(self):
obj = self._makeOne()
self.assertEqual(obj._p_mtime, None)
def test_setting_serial_sets_mtime_to_now(self):
from persistent.timestamp import TimeStamp
obj = self._makeOne()
ts = TimeStamp(2011, 2, 16, 14, 37, 22.0)
obj._p_serial = ts.raw()
self.assertEqual(obj._p_mtime, ts.timeTime())
self.assert_(isinstance(obj._p_mtime, float))
def test_pickle_unpickle(self):
import pickle
from persistent import Persistent
# see above: class must be at module scope to be pickled.
global Picklable
class Picklable(Persistent):
pass
obj = Picklable()
obj.attr = "test"
s = pickle.dumps(obj)
obj2 = pickle.loads(s)
self.assertEqual(obj.attr, obj2.attr)
def test___getattr__(self):
from persistent import CHANGED
from persistent import Persistent
class H1(Persistent):
def __init__(self):
self.n = 0
def __getattr__(self, attr):
self.n += 1
return self.n
obj = H1()
self.assertEqual(obj.larry, 1)
self.assertEqual(obj.curly, 2)
self.assertEqual(obj.moe, 3)
jar = self._makeJar()
jar.add(obj)
obj._p_invalidate()
# The simple Jar used for testing re-initializes the object.
self.assertEqual(obj.larry, 1)
# The getattr hook modified the object, so it should now be
# in the changed state.
self.assertEqual(obj._p_changed, 1)
self.assertEqual(obj._p_state, CHANGED)
self.assertEqual(obj.curly, 2)
self.assertEqual(obj.moe, 3)
def test___getattribute__(self):
from persistent import CHANGED
from persistent import Persistent
class H2(Persistent):
def __init__(self):
self.n = 0
def __getattribute__(self, attr):
supergetattr = super(H2, self).__getattribute__
try:
return supergetattr(attr)
except AttributeError:
n = supergetattr("n")
self.n = n + 1
return n + 1
obj = H2()
self.assertEqual(obj.larry, 1)
self.assertEqual(obj.curly, 2)
self.assertEqual(obj.moe, 3)
jar = self._makeJar()
jar.add(obj)
obj._p_invalidate()
# The simple Jar used for testing re-initializes the object.
self.assertEqual(obj.larry, 1)
# The getattr hook modified the object, so it should now be
# in the changed state.
self.assertEqual(obj._p_changed, 1)
self.assertEqual(obj._p_state, CHANGED)
self.assertEqual(obj.curly, 2)
self.assertEqual(obj.moe, 3)
# TODO: Need to decide how __setattr__ and __delattr__ should work,
# then write tests.
def test_suite():
return unittest.makeSuite(PersistenceTest)
##############################################################################
#
# Copyright (c) 2003 Zope Foundation and Contributors.
# All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
##############################################################################
class DummyConnection:
def setklassstate(self, obj):
"""Method used by PickleCache."""
def test_delitem():
"""
>>> from persistent import PickleCache
>>> conn = DummyConnection()
>>> cache = PickleCache(conn)
>>> del cache[''] # doctest: +ELLIPSIS
Traceback (most recent call last):
...
KeyError:...
>>> from persistent import Persistent
>>> class Derived(Persistent):
... pass
>>> p = Derived()
>>> p._p_oid = 'foo'
>>> p._p_jar = conn
>>> cache['foo'] = p
>>> del cache['foo']
"""
def new_ghost():
"""
Creating ghosts (from scratch, as opposed to ghostifying a non-ghost)
in the current implementation is rather tricky. IPersistent doesn't
really provide the right interface given that:
- _p_deactivate and _p_invalidate are overridable and could assume
that the object's state is properly initialized.
- Assigning _p_changed to None or deleting it just calls _p_deactivate
or _p_invalidate.
The current cache implementation is intimately tied up with the
persistence implementation and has internal access to the persistence
state. The cache implementation can update the persistence state for
newly created and uninitialized objects directly.
The future persistence and cache implementations will be far more
decoupled. The persistence implementation will only manage object
state and generate object-usage events. The cache implementation(s)
will be responsible for managing persistence-related (meta-)state,
such as _p_state, _p_changed, _p_oid, etc. So in that future
implementation, the cache will be more central to managing object
persistence information.
Caches have a new_ghost method that:
- adds an object to the cache, and
- initializes its persistence data.
>>> import persistent
>>> class C(persistent.Persistent):
... pass
>>> from persistent.tests.utils import ResettingJar
>>> jar = ResettingJar()
>>> cache = persistent.PickleCache(jar, 10, 100)
>>> ob = C.__new__(C)
>>> cache.new_ghost('1', ob)
>>> ob._p_changed
>>> ob._p_jar is jar
True
>>> ob._p_oid
'1'
>>> cache.cache_non_ghost_count
0
>>> cache.total_estimated_size
0
Persistent meta classes work too:
>>> import ZODB.persistentclass
>>> class PC:
... __metaclass__ = ZODB.persistentclass.PersistentMetaClass
>>> PC._p_oid
>>> PC._p_jar
>>> PC._p_serial
>>> PC._p_changed
False
>>> cache.new_ghost('2', PC)
>>> PC._p_oid
'2'
>>> PC._p_jar is jar
True
>>> PC._p_serial
>>> PC._p_changed
False
"""
if 0: # this test doesn't belong here!
def cache_invalidate_and_minimize_used_to_leak_None_ref():
"""Persistent weak references
>>> import transaction
>>> import ZODB.tests.util
>>> db = ZODB.tests.util.DB()
>>> conn = db.open()
>>> conn.root.p = p = conn.root().__class__()
>>> transaction.commit()
>>> import sys
>>> old = sys.getrefcount(None)
>>> conn._cache.invalidate(p._p_oid)
>>> sys.getrefcount(None) - old
0
>>> _ = conn.root.p.keys()
>>> old = sys.getrefcount(None)
>>> conn._cache.minimize()
>>> sys.getrefcount(None) - old
0
>>> db.close()
"""
import os
if os.environ.get('USE_ZOPE_TESTING_DOCTEST'):
from zope.testing.doctest import DocTestSuite
else:
from doctest import DocTestSuite
import unittest
def test_suite():
return unittest.TestSuite((
DocTestSuite(),
))
if __name__ == '__main__':
unittest.main()
##############################################################################
#
# Copyright (c) 2001, 2002 Zope Foundation and Contributors.
# All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
##############################################################################
"""Tests for PersistentList
"""
import unittest
l0 = []
l1 = [0]
l2 = [0, 1]
class OtherList:
def __init__(self, initlist):
self.__data = initlist
def __len__(self):
return len(self.__data)
def __getitem__(self, i):
return self.__data[i]
class TestPList(unittest.TestCase):
def _getTargetClass(self):
from persistent.list import PersistentList
return PersistentList
def test_volatile_attributes_not_persisted(self):
# http://www.zope.org/Collectors/Zope/2052
m = self._getTargetClass()()
m.foo = 'bar'
m._v_baz = 'qux'
state = m.__getstate__()
self.failUnless('foo' in state)
self.failIf('_v_baz' in state)
def testTheWorld(self):
# Test constructors
pl = self._getTargetClass()
u = pl()
u0 = pl(l0)
u1 = pl(l1)
u2 = pl(l2)
uu = pl(u)
uu0 = pl(u0)
uu1 = pl(u1)
uu2 = pl(u2)
v = pl(tuple(u))
v0 = pl(OtherList(u0))
vv = pl("this is also a sequence")
# Test __repr__
eq = self.assertEqual
eq(str(u0), str(l0), "str(u0) == str(l0)")
eq(repr(u1), repr(l1), "repr(u1) == repr(l1)")
eq(`u2`, `l2`, "`u2` == `l2`")
# Test __cmp__ and __len__
def mycmp(a, b):
r = cmp(a, b)
if r < 0: return -1
if r > 0: return 1
return r
all = [l0, l1, l2, u, u0, u1, u2, uu, uu0, uu1, uu2]
for a in all:
for b in all:
eq(mycmp(a, b), mycmp(len(a), len(b)),
"mycmp(a, b) == mycmp(len(a), len(b))")
# Test __getitem__
for i in range(len(u2)):
eq(u2[i], i, "u2[i] == i")
# Test __setitem__
uu2[0] = 0
uu2[1] = 100
try:
uu2[2] = 200
except IndexError:
pass
else:
raise TestFailed("uu2[2] shouldn't be assignable")
# Test __delitem__
del uu2[1]
del uu2[0]
try:
del uu2[0]
except IndexError:
pass
else:
raise TestFailed("uu2[0] shouldn't be deletable")
# Test __getslice__
for i in range(-3, 4):
eq(u2[:i], l2[:i], "u2[:i] == l2[:i]")
eq(u2[i:], l2[i:], "u2[i:] == l2[i:]")
for j in range(-3, 4):
eq(u2[i:j], l2[i:j], "u2[i:j] == l2[i:j]")
# Test __setslice__
for i in range(-3, 4):
u2[:i] = l2[:i]
eq(u2, l2, "u2 == l2")
u2[i:] = l2[i:]
eq(u2, l2, "u2 == l2")
for j in range(-3, 4):
u2[i:j] = l2[i:j]
eq(u2, l2, "u2 == l2")
uu2 = u2[:]
uu2[:0] = [-2, -1]
eq(uu2, [-2, -1, 0, 1], "uu2 == [-2, -1, 0, 1]")
uu2[0:] = []
eq(uu2, [], "uu2 == []")
# Test __contains__
for i in u2:
self.failUnless(i in u2, "i in u2")
for i in min(u2)-1, max(u2)+1:
self.failUnless(i not in u2, "i not in u2")
# Test __delslice__
uu2 = u2[:]
del uu2[1:2]
del uu2[0:1]
eq(uu2, [], "uu2 == []")
uu2 = u2[:]
del uu2[1:]
del uu2[:1]
eq(uu2, [], "uu2 == []")
# Test __add__, __radd__, __mul__ and __rmul__
#self.failUnless(u1 + [] == [] + u1 == u1, "u1 + [] == [] + u1 == u1")
self.failUnless(u1 + [1] == u2, "u1 + [1] == u2")
#self.failUnless([-1] + u1 == [-1, 0], "[-1] + u1 == [-1, 0]")
self.failUnless(u2 == u2*1 == 1*u2, "u2 == u2*1 == 1*u2")
self.failUnless(u2+u2 == u2*2 == 2*u2, "u2+u2 == u2*2 == 2*u2")
self.failUnless(u2+u2+u2 == u2*3 == 3*u2, "u2+u2+u2 == u2*3 == 3*u2")
# Test append
u = u1[:]
u.append(1)
eq(u, u2, "u == u2")
# Test insert
u = u2[:]
u.insert(0, -1)
eq(u, [-1, 0, 1], "u == [-1, 0, 1]")
# Test pop
u = pl([0, -1, 1])
u.pop()
eq(u, [0, -1], "u == [0, -1]")
u.pop(0)
eq(u, [-1], "u == [-1]")
# Test remove
u = u2[:]
u.remove(1)
eq(u, u1, "u == u1")
# Test count
u = u2*3
eq(u.count(0), 3, "u.count(0) == 3")
eq(u.count(1), 3, "u.count(1) == 3")
eq(u.count(2), 0, "u.count(2) == 0")
# Test index
eq(u2.index(0), 0, "u2.index(0) == 0")
eq(u2.index(1), 1, "u2.index(1) == 1")
try:
u2.index(2)
except ValueError:
pass
else:
raise TestFailed("expected ValueError")
# Test reverse
u = u2[:]
u.reverse()
eq(u, [1, 0], "u == [1, 0]")
u.reverse()
eq(u, u2, "u == u2")
# Test sort
u = pl([1, 0])
u.sort()
eq(u, u2, "u == u2")
# Test keyword arguments to sort
u.sort(cmp=lambda x,y: cmp(y, x))
eq(u, [1, 0], "u == [1, 0]")
u.sort(key=lambda x:-x)
eq(u, [1, 0], "u == [1, 0]")
u.sort(reverse=True)
eq(u, [1, 0], "u == [1, 0]")
# Passing any other keyword arguments results in a TypeError
try:
u.sort(blah=True)
except TypeError:
pass
else:
raise TestFailed("expected TypeError")
# Test extend
u = u1[:]
u.extend(u2)
eq(u, u1 + u2, "u == u1 + u2")
# Test iadd
u = u1[:]
u += u2
eq(u, u1 + u2, "u == u1 + u2")
# Test imul
u = u1[:]
u *= 3
eq(u, u1 + u1 + u1, "u == u1 + u1 + u1")
def test_suite():
return unittest.makeSuite(TestPList)
if __name__ == "__main__":
loader = unittest.TestLoader()
unittest.main(testLoader=loader)
##############################################################################
#
# Copyright (c) Zope Foundation and Contributors.
# All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.0 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
##############################################################################
import doctest
import unittest
from zope.testing import setupstack
def test_suite():
return unittest.TestSuite((
doctest.DocFileSuite('README.txt'),
))
l0 = {}
l1 = {0:0}
l2 = {0:0, 1:1}
class MappingTests(unittest.TestCase):
def _getTargetClass(self):
from persistent.mapping import PersistentMapping
return PersistentMapping
def test_volatile_attributes_not_persisted(self):
# http://www.zope.org/Collectors/Zope/2052
m = self._getTargetClass()()
m.foo = 'bar'
m._v_baz = 'qux'
state = m.__getstate__()
self.failUnless('foo' in state)
self.failIf('_v_baz' in state)
def testTheWorld(self):
# Test constructors
pm = self._getTargetClass()
u = pm()
u0 = pm(l0)
u1 = pm(l1)
u2 = pm(l2)
uu = pm(u)
uu0 = pm(u0)
uu1 = pm(u1)
uu2 = pm(u2)
class OtherMapping:
def __init__(self, initmapping):
self.__data = initmapping
def items(self):
return self.__data.items()
v0 = pm(OtherMapping(u0))
vv = pm([(0, 0), (1, 1)])
# Test __repr__
eq = self.assertEqual
eq(str(u0), str(l0), "str(u0) == str(l0)")
eq(repr(u1), repr(l1), "repr(u1) == repr(l1)")
eq(`u2`, `l2`, "`u2` == `l2`")
# Test __cmp__ and __len__
def mycmp(a, b):
r = cmp(a, b)
if r < 0: return -1
if r > 0: return 1
return r
all = [l0, l1, l2, u, u0, u1, u2, uu, uu0, uu1, uu2]
for a in all:
for b in all:
eq(mycmp(a, b), mycmp(len(a), len(b)),
"mycmp(a, b) == mycmp(len(a), len(b))")
# Test __getitem__
for i in range(len(u2)):
eq(u2[i], i, "u2[i] == i")
# Test get
for i in range(len(u2)):
eq(u2.get(i), i, "u2.get(i) == i")
eq(u2.get(i, 5), i, "u2.get(i, 5) == i")
for i in min(u2)-1, max(u2)+1:
eq(u2.get(i), None, "u2.get(i) == None")
eq(u2.get(i, 5), 5, "u2.get(i, 5) == 5")
# Test __setitem__
uu2[0] = 0
uu2[1] = 100
uu2[2] = 200
# Test __delitem__
del uu2[1]
del uu2[0]
try:
del uu2[0]
except KeyError:
pass
else:
raise TestFailed("uu2[0] shouldn't be deletable")
# Test __contains__
for i in u2:
self.failUnless(i in u2, "i in u2")
for i in min(u2)-1, max(u2)+1:
self.failUnless(i not in u2, "i not in u2")
# Test update
l = {"a":"b"}
u = pm(l)
u.update(u2)
for i in u:
self.failUnless(i in l or i in u2, "i in l or i in u2")
for i in l:
self.failUnless(i in u, "i in u")
for i in u2:
self.failUnless(i in u, "i in u")
# Test setdefault
x = u2.setdefault(0, 5)
eq(x, 0, "u2.setdefault(0, 5) == 0")
x = u2.setdefault(5, 5)
eq(x, 5, "u2.setdefault(5, 5) == 5")
self.failUnless(5 in u2, "5 in u2")
# Test pop
x = u2.pop(1)
eq(x, 1, "u2.pop(1) == 1")
self.failUnless(1 not in u2, "1 not in u2")
try:
u2.pop(1)
except KeyError:
pass
else:
raise TestFailed("1 should not be poppable from u2")
x = u2.pop(1, 7)
eq(x, 7, "u2.pop(1, 7) == 7")
# Test popitem
items = u2.items()
key, value = u2.popitem()
self.failUnless((key, value) in items, "key, value in items")
self.failUnless(key not in u2, "key not in u2")
# Test clear
u2.clear()
eq(u2, {}, "u2 == {}")
def test_legacy_data():
"""
We've deprecated PersistentDict. If you import
persistent.dict.PersistentDict, you'll get
persistent.mapping.PersistentMapping.
>>> import persistent.dict, persistent.mapping
>>> persistent.dict.PersistentDict is persistent.mapping.PersistentMapping
True
PersistentMapping uses a data attribute for its mapping data:
>>> m = persistent.mapping.PersistentMapping()
>>> m.__dict__
{'data': {}}
In the past, it used a _container attribute. For some time, the
implementation continued to use a _container attribute in pickles
(__get/setstate__) to be compatible with older releases. This isn't
really necessary any more. In fact, releases for which this might
matter can no longer share databases with current releases. Because
releases as recent as 3.9.0b5 still use _container in saved state, we
need to accept such state, but we stop producing it.
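A minimal sketch of one way to accept the legacy key on demand (hypothetical
class, shown only to illustrate the idea)::

    from persistent import Persistent

    class LegacyFriendlyMapping(Persistent):
        # Sketch: expose 'data', migrating a legacy '_container' entry on
        # first access.  Writing through self.__dict__ bypasses the
        # persistent __setattr__ hook, so _p_changed is left untouched.
        @property
        def data(self):
            d = self.__dict__
            if 'data' not in d and '_container' in d:
                d['data'] = d.pop('_container')
            return d['data']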
If we reset its __dict__ with legacy data:
>>> m.__dict__.clear()
>>> m.__dict__['_container'] = {'a': 1}
>>> m.__dict__
{'_container': {'a': 1}}
>>> m._p_changed = 0
But when we perform any operations on it, the data will be converted
without marking the object as changed:
>>> m
{'a': 1}
>>> m.__dict__
{'data': {'a': 1}}
>>> m._p_changed
0
>>> m.__getstate__()
{'data': {'a': 1}}
"""
def test_suite():
return unittest.TestSuite((
doctest.DocTestSuite(),
unittest.makeSuite(MappingTests),
))
##############################################################################
#
# Copyright (c) 2004 Zope Foundation and Contributors.
# All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
##############################################################################
"""Overriding attr methods
This module tests and documents, through example, overriding attribute
access methods.
"""
from persistent import Persistent # ouch!
def _resettingJar():
from persistent.tests.utils import ResettingJar
return ResettingJar()
def _rememberingJar():
from persistent.tests.utils import RememberingJar
return RememberingJar()
class SampleOverridingGetattr(Persistent):
"""Example of overriding __getattr__
"""
def __getattr__(self, name):
"""Get attributes that can't be gotten the usual way
The __getattr__ method works pretty much the same for persistent
classes as it does for other classes. No special handling is
needed. If an object is a ghost, then it will be activated before
__getattr__ is called.
In this example, our object returns a tuple with the attribute
name converted to upper case and the value of _p_changed, for any
attribute that isn't handled by the default machinery.
>>> o = SampleOverridingGetattr()
>>> o._p_changed
False
>>> o._p_oid
>>> o._p_jar
>>> o.spam
('SPAM', False)
>>> o.spam = 1
>>> o.spam
1
We'll save the object, so it can be deactivated:
>>> jar = _resettingJar()
>>> jar.add(o)
>>> o._p_deactivate()
>>> o._p_changed
And now, if we ask for an attribute it doesn't have,
>>> o.eggs
('EGGS', False)
And we see that the object was activated before calling the
__getattr__ method.
"""
# Don't pretend we have any special attributes.
if name.startswith("__") and name.endswith("__"):
raise AttributeError(name)
else:
return name.upper(), self._p_changed
class SampleOverridingGetattributeSetattrAndDelattr(Persistent):
"""Example of overriding __getattribute__, __setattr__, and __delattr__
This example shows how to override the __getattribute__,
__setattr__, and __delattr__ methods. We'll create a class that
stores its attributes in a secret dictionary within its instance
dictionary.
The class will have the policy that variables with names starting
with 'tmp_' will be volatile.
"""
def __init__(self, **kw):
self.__dict__['__secret__'] = kw.copy()
def __getattribute__(self, name):
"""Get an attribute value
The __getattribute__ method is called for all attribute
accesses. It overrides the attribute access support inherited
from Persistent.
Our sample class lets us provide initial values as keyword
arguments to the constructor:
>>> o = SampleOverridingGetattributeSetattrAndDelattr(x=1)
>>> o._p_changed
0
>>> o._p_oid
>>> o._p_jar
>>> o.x
1
>>> o.y
Traceback (most recent call last):
...
AttributeError: y
Next, we'll save the object in a database so that we can
deactivate it:
>>> jar = _rememberingJar()
>>> jar.add(o)
>>> o._p_deactivate()
>>> o._p_changed
And we'll get some data:
>>> o.x
1
which activates the object:
>>> o._p_changed
0
It works for missing attributes too:
>>> o._p_deactivate()
>>> o._p_changed
>>> o.y
Traceback (most recent call last):
...
AttributeError: y
>>> o._p_changed
0
See the very important note in the comment below!
"""
#################################################################
# IMPORTANT! READ THIS! 8->
#
# We *always* give Persistent a chance first.
# Persistent handles certain special attributes, like _p_
# attributes. In particular, the base class handles __dict__
# and __class__.
#
# We call _p_getattr. If it returns True, then we have to
# use Persistent.__getattribute__ to get the value.
#
#################################################################
if Persistent._p_getattr(self, name):
return Persistent.__getattribute__(self, name)
# Data should be in our secret dictionary:
secret = self.__dict__['__secret__']
if name in secret:
return secret[name]
# Maybe it's a method:
meth = getattr(self.__class__, name, None)
if meth is None:
raise AttributeError(name)
return meth.__get__(self, self.__class__)
def __setattr__(self, name, value):
"""Set an attribute value
The __setattr__ method is called for all attribute
assignments. It overrides the attribute assignment support
inherited from Persistent.
Implementors of __setattr__ methods:
1. Must call Persistent._p_setattr first to allow it
to handle some attributes and to make sure that the object
is activated if necessary, and
2. Must set _p_changed to mark objects as changed.
See the comments in the source below.
>>> o = SampleOverridingGetattributeSetattrAndDelattr()
>>> o._p_changed
0
>>> o._p_oid
>>> o._p_jar
>>> o.x
Traceback (most recent call last):
...
AttributeError: x
>>> o.x = 1
>>> o.x
1
Because the implementation doesn't store attributes directly
in the instance dictionary, we don't have a key for the attribute:
>>> 'x' in o.__dict__
False
Next, we'll give the object a "remembering" jar so we can
deactivate it:
>>> jar = _rememberingJar()
>>> jar.add(o)
>>> o._p_deactivate()
>>> o._p_changed
We'll modify an attribute
>>> o.y = 2
>>> o.y
2
which reactivates it and marks it as modified, because our
__setattr__ implementation set _p_changed:
>>> o._p_changed
1
Now, if we fake a commit:
>>> jar.fake_commit()
>>> o._p_changed
0
And deactivate the object:
>>> o._p_deactivate()
>>> o._p_changed
and then set a variable with a name starting with 'tmp_',
the object will be activated, but not marked as modified,
because our __setattr__ implementation doesn't mark the
object as changed if the name starts with 'tmp_':
>>> o.tmp_foo = 3
>>> o._p_changed
0
>>> o.tmp_foo
3
"""
#################################################################
# IMPORTANT! READ THIS! 8->
#
# We *always* give Persistent a chance first.
# Persistent handles certain special attributes, like _p_
# attributes.
#
# We call _p_setattr. If it returns True, then we are done.
# It has already set the attribute.
#
#################################################################
if Persistent._p_setattr(self, name, value):
return
self.__dict__['__secret__'][name] = value
if not name.startswith('tmp_'):
self._p_changed = 1
def __delattr__(self, name):
"""Delete an attribute value
The __delattr__ method is called for all attribute
deletions. It overrides the attribute deletion support
inherited from Persistent.
Implementors of __delattr__ methods:
1. Must call Persistent._p_delattr first to allow it
to handle some attributes and to make sure that the object
is activated if necessary, and
2. Must set _p_changed to mark objects as changed.
See the comments in the source below.
>>> o = SampleOverridingGetattributeSetattrAndDelattr(
... x=1, y=2, tmp_z=3)
>>> o._p_changed
0
>>> o._p_oid
>>> o._p_jar
>>> o.x
1
>>> del o.x
>>> o.x
Traceback (most recent call last):
...
AttributeError: x
Next, we'll save the object in a jar so that we can
deactivate it:
>>> jar = _rememberingJar()
>>> jar.add(o)
>>> o._p_deactivate()
>>> o._p_changed
If we delete an attribute:
>>> del o.y
The object is activated. It is also marked as changed because
our implementation marked it as changed.
>>> o._p_changed
1
>>> o.y
Traceback (most recent call last):
...
AttributeError: y
>>> o.tmp_z
3
Now, if we fake a commit:
>>> jar.fake_commit()
>>> o._p_changed
0
And deactivate the object:
>>> o._p_deactivate()
>>> o._p_changed
and then delete a variable with a name starting with 'tmp_',
the object will be activated, but not marked as modified,
because our __delattr__ implementation doesn't mark the
object as changed if the name starts with 'tmp_':
>>> del o.tmp_z
>>> o._p_changed
0
>>> o.tmp_z
Traceback (most recent call last):
...
AttributeError: tmp_z
"""
#################################################################
# IMPORTANT! READ THIS! 8->
#
# We *always* give Persistent a chance first.
# Persistent handles certain special attributes, like _p_
# attributes.
#
# We call _p_delattr. If it returns True, then we are done.
# It has already deleted the attribute.
#
#################################################################
if Persistent._p_delattr(self, name):
return
del self.__dict__['__secret__'][name]
if not name.startswith('tmp_'):
self._p_changed = 1
def test_suite():
import os
if os.environ.get('USE_ZOPE_TESTING_DOCTEST'):
from zope.testing.doctest import DocTestSuite
else:
from doctest import DocTestSuite
return DocTestSuite()
##############################################################################
#
# Copyright (c) Zope Foundation and Contributors.
# All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.0 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
##############################################################################
from persistent import Persistent
try:
from persistent import simple_new
except ImportError:
simple_new = None
import os
if os.environ.get('USE_ZOPE_TESTING_DOCTEST'):
from zope.testing import doctest
else:
import doctest
import unittest
class P(Persistent):
def __init__(self):
self.x = 0
def inc(self):
self.x += 1
def cpersistent_setstate_pointer_sanity():
"""
>>> Persistent().__setstate__({})
Traceback (most recent call last):
...
TypeError: this object has no instance dictionary
>>> class C(Persistent): __slots__ = 'x', 'y'
>>> C().__setstate__(({}, {}))
Traceback (most recent call last):
...
TypeError: this object has no instance dictionary
"""
if simple_new is not None:
def cpersistent_simple_new_invalid_argument():
"""
>>> simple_new('')
Traceback (most recent call last):
...
TypeError: simple_new argument must be a type object.
"""
def test_suite():
return unittest.TestSuite((
doctest.DocFileSuite("persistent.txt", globs={"P": P}),
doctest.DocTestSuite(),
))
##############################################################################
#
# Copyright (c) 2003 Zope Foundation and Contributors.
# All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
##############################################################################
from persistent import Persistent
import pickle
def print_dict(d):
d = d.items()
d.sort()
print '{%s}' % (', '.join(
[('%r: %r' % (k, v)) for (k, v) in d]
))
def cmpattrs(self, other, *attrs):
for attr in attrs:
if attr[:3] in ('_v_', '_p_'):
continue
c = cmp(getattr(self, attr, None), getattr(other, attr, None))
if c:
return c
return 0
class Simple(Persistent):
def __init__(self, name, **kw):
self.__name__ = name
self.__dict__.update(kw)
self._v_favorite_color = 'blue'
self._p_foo = 'bar'
def __cmp__(self, other):
return cmpattrs(self, other, '__class__', *(self.__dict__.keys()))
def test_basic_pickling():
"""
>>> x = Simple('x', aaa=1, bbb='foo')
>>> print_dict(x.__getstate__())
{'__name__': 'x', 'aaa': 1, 'bbb': 'foo'}
>>> f, (c,), state = x.__reduce__()
>>> f.__name__
'__newobj__'
>>> f.__module__
'copy_reg'
>>> c.__name__
'Simple'
>>> print_dict(state)
{'__name__': 'x', 'aaa': 1, 'bbb': 'foo'}
>>> pickle.loads(pickle.dumps(x)) == x
1
>>> pickle.loads(pickle.dumps(x, 0)) == x
1
>>> pickle.loads(pickle.dumps(x, 1)) == x
1
>>> pickle.loads(pickle.dumps(x, 2)) == x
1
>>> x.__setstate__({'z': 1})
>>> x.__dict__
{'z': 1}
"""
class Custom(Simple):
def __new__(cls, x, y):
r = Persistent.__new__(cls)
r.x, r.y = x, y
return r
def __init__(self, x, y):
self.a = 42
def __getnewargs__(self):
return self.x, self.y
def __getstate__(self):
return self.a
def __setstate__(self, a):
self.a = a
def test_pickling_w_overrides():
"""
>>> x = Custom('x', 'y')
>>> x.a = 99
>>> (f, (c, ax, ay), a) = x.__reduce__()
>>> f.__name__
'__newobj__'
>>> f.__module__
'copy_reg'
>>> c.__name__
'Custom'
>>> ax, ay, a
('x', 'y', 99)
>>> pickle.loads(pickle.dumps(x)) == x
1
>>> pickle.loads(pickle.dumps(x, 0)) == x
1
>>> pickle.loads(pickle.dumps(x, 1)) == x
1
>>> pickle.loads(pickle.dumps(x, 2)) == x
1
"""
class Slotted(Persistent):
__slots__ = 's1', 's2', '_p_splat', '_v_eek'
def __init__(self, s1, s2):
self.s1, self.s2 = s1, s2
self._v_eek = 1
self._p_splat = 2
class SubSlotted(Slotted):
__slots__ = 's3', 's4'
def __init__(self, s1, s2, s3):
Slotted.__init__(self, s1, s2)
self.s3 = s3
def __cmp__(self, other):
return cmpattrs(self, other, '__class__', 's1', 's2', 's3', 's4')
def test_pickling_w_slots_only():
"""
>>> x = SubSlotted('x', 'y', 'z')
>>> d, s = x.__getstate__()
>>> d
>>> print_dict(s)
{'s1': 'x', 's2': 'y', 's3': 'z'}
>>> pickle.loads(pickle.dumps(x)) == x
1
>>> pickle.loads(pickle.dumps(x, 0)) == x
1
>>> pickle.loads(pickle.dumps(x, 1)) == x
1
>>> pickle.loads(pickle.dumps(x, 2)) == x
1
>>> x.s4 = 'spam'
>>> d, s = x.__getstate__()
>>> d
>>> print_dict(s)
{'s1': 'x', 's2': 'y', 's3': 'z', 's4': 'spam'}
>>> pickle.loads(pickle.dumps(x)) == x
1
>>> pickle.loads(pickle.dumps(x, 0)) == x
1
>>> pickle.loads(pickle.dumps(x, 1)) == x
1
>>> pickle.loads(pickle.dumps(x, 2)) == x
1
"""
class SubSubSlotted(SubSlotted):
def __init__(self, s1, s2, s3, **kw):
SubSlotted.__init__(self, s1, s2, s3)
self.__dict__.update(kw)
self._v_favorite_color = 'blue'
self._p_foo = 'bar'
def __cmp__(self, other):
return cmpattrs(self, other,
'__class__', 's1', 's2', 's3', 's4',
*(self.__dict__.keys()))
def test_pickling_w_slots():
"""
>>> x = SubSubSlotted('x', 'y', 'z', aaa=1, bbb='foo')
>>> d, s = x.__getstate__()
>>> print_dict(d)
{'aaa': 1, 'bbb': 'foo'}
>>> print_dict(s)
{'s1': 'x', 's2': 'y', 's3': 'z'}
>>> pickle.loads(pickle.dumps(x)) == x
1
>>> pickle.loads(pickle.dumps(x, 0)) == x
1
>>> pickle.loads(pickle.dumps(x, 1)) == x
1
>>> pickle.loads(pickle.dumps(x, 2)) == x
1
>>> x.s4 = 'spam'
>>> d, s = x.__getstate__()
>>> print_dict(d)
{'aaa': 1, 'bbb': 'foo'}
>>> print_dict(s)
{'s1': 'x', 's2': 'y', 's3': 'z', 's4': 'spam'}
>>> pickle.loads(pickle.dumps(x)) == x
1
>>> pickle.loads(pickle.dumps(x, 0)) == x
1
>>> pickle.loads(pickle.dumps(x, 1)) == x
1
>>> pickle.loads(pickle.dumps(x, 2)) == x
1
"""
def test_pickling_w_slots_w_empty_dict():
"""
>>> x = SubSubSlotted('x', 'y', 'z')
>>> d, s = x.__getstate__()
>>> print_dict(d)
{}
>>> print_dict(s)
{'s1': 'x', 's2': 'y', 's3': 'z'}
>>> pickle.loads(pickle.dumps(x)) == x
1
>>> pickle.loads(pickle.dumps(x, 0)) == x
1
>>> pickle.loads(pickle.dumps(x, 1)) == x
1
>>> pickle.loads(pickle.dumps(x, 2)) == x
1
>>> x.s4 = 'spam'
>>> d, s = x.__getstate__()
>>> print_dict(d)
{}
>>> print_dict(s)
{'s1': 'x', 's2': 'y', 's3': 'z', 's4': 'spam'}
>>> pickle.loads(pickle.dumps(x)) == x
1
>>> pickle.loads(pickle.dumps(x, 0)) == x
1
>>> pickle.loads(pickle.dumps(x, 1)) == x
1
>>> pickle.loads(pickle.dumps(x, 2)) == x
1
"""
import os
if os.environ.get('USE_ZOPE_TESTING_DOCTEST'):
from zope.testing.doctest import DocTestSuite
else:
from doctest import DocTestSuite
import unittest
def test_suite():
return unittest.TestSuite((
DocTestSuite(),
))
if __name__ == '__main__': unittest.main()
##############################################################################
#
# Copyright (c) 2009 Zope Corporation and Contributors.
# All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
##############################################################################
import unittest
_marker = object()
class PickleCacheTests(unittest.TestCase):
def _getTargetClass(self):
from persistent.picklecache import PickleCache
return PickleCache
def _makeOne(self, jar=None, target_size=10):
if jar is None:
jar = DummyConnection()
return self._getTargetClass()(jar, target_size)
def _makePersist(self, state=None, oid='foo', jar=_marker):
from persistent.interfaces import GHOST
if state is None:
state = GHOST
if jar is _marker:
jar = DummyConnection()
persist = DummyPersistent()
persist._p_state = state
persist._p_oid = oid
persist._p_jar = jar
return persist
def test_class_conforms_to_IPickleCache(self):
from zope.interface.verify import verifyClass
from persistent.interfaces import IPickleCache
verifyClass(IPickleCache, self._getTargetClass())
def test_instance_conforms_to_IPickleCache(self):
from zope.interface.verify import verifyObject
from persistent.interfaces import IPickleCache
verifyObject(IPickleCache, self._makeOne())
def test_empty(self):
cache = self._makeOne()
self.assertEqual(len(cache), 0)
self.assertEqual(len(cache.items()), 0)
self.assertEqual(len(cache.klass_items()), 0)
self.assertEqual(cache.ringlen(), 0)
self.assertEqual(len(cache.lru_items()), 0)
self.assertEqual(cache.cache_size, 10)
self.assertEqual(cache.cache_drain_resistance, 0)
self.assertEqual(cache.cache_non_ghost_count, 0)
self.assertEqual(dict(cache.cache_data), {})
self.assertEqual(cache.cache_klass_count, 0)
def test___getitem___nonesuch_raises_KeyError(self):
cache = self._makeOne()
self.assertRaises(KeyError, lambda: cache['nonesuch'])
def test_get_nonesuch_no_default(self):
cache = self._makeOne()
self.assertEqual(cache.get('nonesuch'), None)
def test_get_nonesuch_w_default(self):
cache = self._makeOne()
default = object
self.failUnless(cache.get('nonesuch', default) is default)
def test___setitem___non_string_oid_raises_ValueError(self):
cache = self._makeOne()
try:
cache[object()] = self._makePersist()
except ValueError:
pass
else:
self.fail("Didn't raise ValueError with non-string OID.")
def test___setitem___duplicate_oid_raises_KeyError(self):
cache = self._makeOne()
original = self._makePersist()
cache['original'] = original
duplicate = self._makePersist()
try:
cache['original'] = duplicate
except KeyError:
pass
else:
self.fail("Didn't raise KeyError with duplicate OID.")
def test___setitem___ghost(self):
from persistent.interfaces import GHOST
cache = self._makeOne()
ghost = self._makePersist(state=GHOST)
cache['ghost'] = ghost
self.assertEqual(len(cache), 1)
self.assertEqual(len(cache.items()), 1)
self.assertEqual(len(cache.klass_items()), 0)
self.assertEqual(cache.items()[0][0], 'ghost')
self.assertEqual(cache.ringlen(), 0)
self.failUnless(cache.items()[0][1] is ghost)
self.failUnless(cache['ghost'] is ghost)
def test___setitem___non_ghost(self):
from persistent.interfaces import UPTODATE
cache = self._makeOne()
uptodate = self._makePersist(state=UPTODATE)
cache['uptodate'] = uptodate
self.assertEqual(len(cache), 1)
self.assertEqual(len(cache.items()), 1)
self.assertEqual(len(cache.klass_items()), 0)
self.assertEqual(cache.items()[0][0], 'uptodate')
self.assertEqual(cache.ringlen(), 1)
self.failUnless(cache.items()[0][1] is uptodate)
self.failUnless(cache['uptodate'] is uptodate)
self.failUnless(cache.get('uptodate') is uptodate)
def test___setitem___persistent_class(self):
class pclass(object):
pass
cache = self._makeOne()
cache['pclass'] = pclass
self.assertEqual(len(cache), 1)
self.assertEqual(len(cache.items()), 0)
self.assertEqual(len(cache.klass_items()), 1)
self.assertEqual(cache.klass_items()[0][0], 'pclass')
self.failUnless(cache.klass_items()[0][1] is pclass)
self.failUnless(cache['pclass'] is pclass)
self.failUnless(cache.get('pclass') is pclass)
def test___delitem___non_string_oid_raises_ValueError(self):
cache = self._makeOne()
try:
del cache[object()]
except ValueError:
pass
else:
self.fail("Didn't raise ValueError with non-string OID.")
def test___delitem___nonesuch_raises_KeyError(self):
cache = self._makeOne()
original = self._makePersist()
try:
del cache['nonesuch']
except KeyError:
pass
else:
self.fail("Didn't raise KeyError with nonesuch OID.")
def test_lruitems(self):
from persistent.interfaces import UPTODATE
cache = self._makeOne()
cache['one'] = self._makePersist(oid='one', state=UPTODATE)
cache['two'] = self._makePersist(oid='two', state=UPTODATE)
cache['three'] = self._makePersist(oid='three', state=UPTODATE)
items = cache.lru_items()
self.assertEqual(len(items), 3)
self.assertEqual(items[0][0], 'one')
self.assertEqual(items[1][0], 'two')
self.assertEqual(items[2][0], 'three')
def test_mru_nonesuch_raises_KeyError(self):
cache = self._makeOne()
try:
cache.mru('nonesuch')
except KeyError:
pass
else:
self.fail("Didn't raise KeyError with nonesuch OID.")
def test_mru_normal(self):
from persistent.interfaces import UPTODATE
cache = self._makeOne()
cache['one'] = self._makePersist(oid='one', state=UPTODATE)
cache['two'] = self._makePersist(oid='two', state=UPTODATE)
cache['three'] = self._makePersist(oid='three', state=UPTODATE)
cache.mru('two')
self.assertEqual(cache.ringlen(), 3)
items = cache.lru_items()
self.assertEqual(len(items), 3)
self.assertEqual(items[0][0], 'one')
self.assertEqual(items[1][0], 'three')
self.assertEqual(items[2][0], 'two')
def test_mru_ghost(self):
from persistent.interfaces import UPTODATE
from persistent.interfaces import GHOST
cache = self._makeOne()
cache['one'] = self._makePersist(oid='one', state=UPTODATE)
two = cache['two'] = self._makePersist(oid='two', state=GHOST)
cache['three'] = self._makePersist(oid='three', state=UPTODATE)
cache.mru('two')
self.assertEqual(cache.ringlen(), 2)
items = cache.lru_items()
self.assertEqual(len(items), 2)
self.assertEqual(items[0][0], 'one')
self.assertEqual(items[1][0], 'three')
def test_mru_was_ghost_now_active(self):
from persistent.interfaces import UPTODATE
from persistent.interfaces import GHOST
cache = self._makeOne()
cache['one'] = self._makePersist(oid='one', state=UPTODATE)
two = cache['two'] = self._makePersist(oid='two', state=GHOST)
cache['three'] = self._makePersist(oid='three', state=UPTODATE)
two._p_state = UPTODATE
cache.mru('two')
self.assertEqual(cache.ringlen(), 3)
items = cache.lru_items()
self.assertEqual(len(items), 3)
self.assertEqual(items[0][0], 'one')
self.assertEqual(items[1][0], 'three')
self.assertEqual(items[2][0], 'two')
def test_mru_first(self):
from persistent.interfaces import UPTODATE
cache = self._makeOne()
cache['one'] = self._makePersist(oid='one', state=UPTODATE)
cache['two'] = self._makePersist(oid='two', state=UPTODATE)
cache['three'] = self._makePersist(oid='three', state=UPTODATE)
cache.mru('one')
self.assertEqual(cache.ringlen(), 3)
items = cache.lru_items()
self.assertEqual(len(items), 3)
self.assertEqual(items[0][0], 'two')
self.assertEqual(items[1][0], 'three')
self.assertEqual(items[2][0], 'one')
def test_mru_last(self):
from persistent.interfaces import UPTODATE
cache = self._makeOne()
cache['one'] = self._makePersist(oid='one', state=UPTODATE)
cache['two'] = self._makePersist(oid='two', state=UPTODATE)
cache['three'] = self._makePersist(oid='three', state=UPTODATE)
cache.mru('three')
self.assertEqual(cache.ringlen(), 3)
items = cache.lru_items()
self.assertEqual(len(items), 3)
self.assertEqual(items[0][0], 'one')
self.assertEqual(items[1][0], 'two')
self.assertEqual(items[2][0], 'three')
def test_incrgc_simple(self):
import gc
from persistent.interfaces import UPTODATE
cache = self._makeOne()
oids = []
for i in range(100):
oid = 'oid_%04d' % i
oids.append(oid)
cache[oid] = self._makePersist(oid=oid, state=UPTODATE)
self.assertEqual(cache.cache_non_ghost_count, 100)
cache.incrgc()
gc.collect() # banish the ghosts who are no longer in the ring
self.assertEqual(cache.cache_non_ghost_count, 10)
items = cache.lru_items()
self.assertEqual(len(items), 10)
self.assertEqual(items[0][0], 'oid_0090')
self.assertEqual(items[1][0], 'oid_0091')
self.assertEqual(items[2][0], 'oid_0092')
self.assertEqual(items[3][0], 'oid_0093')
self.assertEqual(items[4][0], 'oid_0094')
self.assertEqual(items[5][0], 'oid_0095')
self.assertEqual(items[6][0], 'oid_0096')
self.assertEqual(items[7][0], 'oid_0097')
self.assertEqual(items[8][0], 'oid_0098')
self.assertEqual(items[9][0], 'oid_0099')
for oid in oids[:90]:
self.failUnless(cache.get(oid) is None)
for oid in oids[90:]:
self.failIf(cache.get(oid) is None)
def test_incrgc_w_smaller_drain_resistance(self):
from persistent.interfaces import UPTODATE
cache = self._makeOne()
cache.drain_resistance = 2
oids = []
for i in range(100):
oid = 'oid_%04d' % i
oids.append(oid)
cache[oid] = self._makePersist(oid=oid, state=UPTODATE)
self.assertEqual(cache.cache_non_ghost_count, 100)
cache.incrgc()
self.assertEqual(cache.cache_non_ghost_count, 10)
def test_incrgc_w_larger_drain_resistance(self):
from persistent.interfaces import UPTODATE
cache = self._makeOne()
cache.drain_resistance = 2
cache.target_size = 90
oids = []
for i in range(100):
oid = 'oid_%04d' % i
oids.append(oid)
cache[oid] = self._makePersist(oid=oid, state=UPTODATE)
self.assertEqual(cache.cache_non_ghost_count, 100)
cache.incrgc()
self.assertEqual(cache.cache_non_ghost_count, 49)
def test_full_sweep(self):
import gc
from persistent.interfaces import UPTODATE
cache = self._makeOne()
oids = []
for i in range(100):
oid = 'oid_%04d' % i
oids.append(oid)
cache[oid] = self._makePersist(oid=oid, state=UPTODATE)
self.assertEqual(cache.cache_non_ghost_count, 100)
cache.full_sweep()
gc.collect() # banish the ghosts who are no longer in the ring
self.assertEqual(cache.cache_non_ghost_count, 0)
self.failUnless(cache.ring.next is cache.ring)
for oid in oids:
self.failUnless(cache.get(oid) is None)
def test_minimize(self):
import gc
from persistent.interfaces import UPTODATE
cache = self._makeOne()
oids = []
for i in range(100):
oid = 'oid_%04d' % i
oids.append(oid)
cache[oid] = self._makePersist(oid=oid, state=UPTODATE)
self.assertEqual(cache.cache_non_ghost_count, 100)
cache.minimize()
gc.collect() # banish the ghosts who are no longer in the ring
self.assertEqual(cache.cache_non_ghost_count, 0)
for oid in oids:
self.failUnless(cache.get(oid) is None)
def test_new_ghost_non_persistent_object(self):
cache = self._makeOne()
self.assertRaises(AttributeError, cache.new_ghost, '123', object())
def test_new_ghost_obj_already_has_oid(self):
from persistent.interfaces import GHOST
candidate = self._makePersist(oid='123', state=GHOST)
cache = self._makeOne()
self.assertRaises(ValueError, cache.new_ghost, '123', candidate)
def test_new_ghost_obj_already_has_jar(self):
class Dummy(object):
_p_oid = None
_p_jar = object()
cache = self._makeOne()
candidate = self._makePersist(oid=None, jar=object())
self.assertRaises(ValueError, cache.new_ghost, '123', candidate)
def test_new_ghost_obj_already_in_cache(self):
cache = self._makeOne()
candidate = self._makePersist(oid=None, jar=None)
cache['123'] = candidate
self.assertRaises(KeyError, cache.new_ghost, '123', candidate)
def test_new_ghost_success_already_ghost(self):
from persistent.interfaces import GHOST
cache = self._makeOne()
candidate = self._makePersist(oid=None, jar=None)
cache.new_ghost('123', candidate)
self.failUnless(cache.get('123') is candidate)
self.assertEqual(candidate._p_oid, '123')
self.assertEqual(candidate._p_jar, cache.jar)
self.assertEqual(candidate._p_state, GHOST)
def test_new_ghost_success_not_already_ghost(self):
from persistent.interfaces import GHOST
from persistent.interfaces import UPTODATE
cache = self._makeOne()
candidate = self._makePersist(oid=None, jar=None, state=UPTODATE)
cache.new_ghost('123', candidate)
self.failUnless(cache.get('123') is candidate)
self.assertEqual(candidate._p_oid, '123')
self.assertEqual(candidate._p_jar, cache.jar)
self.assertEqual(candidate._p_state, GHOST)
def test_new_ghost_w_pclass_non_ghost(self):
class Pclass(object):
_p_oid = None
_p_jar = None
cache = self._makeOne()
cache.new_ghost('123', Pclass)
self.failUnless(cache.get('123') is Pclass)
self.failUnless(cache.persistent_classes['123'] is Pclass)
self.assertEqual(Pclass._p_oid, '123')
self.assertEqual(Pclass._p_jar, cache.jar)
def test_new_ghost_w_pclass_ghost(self):
class Pclass(object):
_p_oid = None
_p_jar = None
cache = self._makeOne()
cache.new_ghost('123', Pclass)
self.failUnless(cache.get('123') is Pclass)
self.failUnless(cache.persistent_classes['123'] is Pclass)
self.assertEqual(Pclass._p_oid, '123')
self.assertEqual(Pclass._p_jar, cache.jar)
def test_reify_miss_single(self):
cache = self._makeOne()
self.assertRaises(KeyError, cache.reify, '123')
def test_reify_miss_multiple(self):
cache = self._makeOne()
self.assertRaises(KeyError, cache.reify, ['123', '456'])
def test_reify_hit_single_ghost(self):
from persistent.interfaces import GHOST
from persistent.interfaces import UPTODATE
cache = self._makeOne()
candidate = self._makePersist(oid='123', jar=cache.jar, state=GHOST)
cache['123'] = candidate
self.assertEqual(cache.ringlen(), 0)
cache.reify('123')
self.assertEqual(cache.ringlen(), 1)
items = cache.lru_items()
self.assertEqual(items[0][0], '123')
self.failUnless(items[0][1] is candidate)
self.assertEqual(candidate._p_state, UPTODATE)
def test_reify_hit_single_non_ghost(self):
from persistent.interfaces import UPTODATE
cache = self._makeOne()
candidate = self._makePersist(oid='123', jar=cache.jar, state=UPTODATE)
cache['123'] = candidate
self.assertEqual(cache.ringlen(), 1)
cache.reify('123')
self.assertEqual(cache.ringlen(), 1)
self.assertEqual(candidate._p_state, UPTODATE)
def test_reify_hit_multiple_mixed(self):
from persistent.interfaces import GHOST
from persistent.interfaces import UPTODATE
cache = self._makeOne()
c1 = self._makePersist(oid='123', jar=cache.jar, state=GHOST)
cache['123'] = c1
c2 = self._makePersist(oid='456', jar=cache.jar, state=UPTODATE)
cache['456'] = c2
self.assertEqual(cache.ringlen(), 1)
cache.reify(['123', '456'])
self.assertEqual(cache.ringlen(), 2)
self.assertEqual(c1._p_state, UPTODATE)
self.assertEqual(c2._p_state, UPTODATE)
def test_invalidate_miss_single(self):
cache = self._makeOne()
cache.invalidate('123') # doesn't raise
def test_invalidate_miss_multiple(self):
cache = self._makeOne()
cache.invalidate(['123', '456']) # doesn't raise
def test_invalidate_hit_single_ghost(self):
from persistent.interfaces import GHOST
cache = self._makeOne()
candidate = self._makePersist(oid='123', jar=cache.jar, state=GHOST)
cache['123'] = candidate
self.assertEqual(cache.ringlen(), 0)
cache.invalidate('123')
self.assertEqual(cache.ringlen(), 0)
self.assertEqual(candidate._p_state, GHOST)
def test_invalidate_hit_single_non_ghost(self):
from persistent.interfaces import GHOST
from persistent.interfaces import UPTODATE
cache = self._makeOne()
candidate = self._makePersist(oid='123', jar=cache.jar, state=UPTODATE)
cache['123'] = candidate
self.assertEqual(cache.ringlen(), 1)
cache.invalidate('123')
self.assertEqual(cache.ringlen(), 0)
self.assertEqual(candidate._p_state, GHOST)
def test_invalidate_hit_multiple_mixed(self):
from persistent.interfaces import GHOST
from persistent.interfaces import UPTODATE
cache = self._makeOne()
c1 = self._makePersist(oid='123', jar=cache.jar, state=GHOST)
cache['123'] = c1
c2 = self._makePersist(oid='456', jar=cache.jar, state=UPTODATE)
cache['456'] = c2
self.assertEqual(cache.ringlen(), 1)
cache.invalidate(['123', '456'])
self.assertEqual(cache.ringlen(), 0)
self.assertEqual(c1._p_state, GHOST)
self.assertEqual(c2._p_state, GHOST)
def test_invalidate_hit_pclass(self):
class Pclass(object):
_p_oid = None
_p_jar = None
cache = self._makeOne()
cache['123'] = Pclass
self.failUnless(cache.persistent_classes['123'] is Pclass)
cache.invalidate('123')
self.failIf('123' in cache.persistent_classes)
class DummyPersistent(object):
def _p_invalidate(self):
from persistent.interfaces import GHOST
self._p_state = GHOST
def _p_activate(self):
from persistent.interfaces import UPTODATE
self._p_state = UPTODATE
class DummyConnection:
pass
def test_suite():
return unittest.TestSuite((
unittest.makeSuite(PickleCacheTests),
))
if __name__ == '__main__':
unittest.main()
##############################################################################
#
# Copyright (c) 2011 Zope Foundation and Contributors.
# All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
##############################################################################
import unittest
class PersistentTests(unittest.TestCase):
def _getTargetClass(self):
from persistent.pyPersistence import Persistent
return Persistent
def _makeOne(self, *args, **kw):
return self._getTargetClass()(*args, **kw)
def _makeJar(self):
from zope.interface import implements
from persistent.interfaces import IPersistentDataManager
class _Cache(object):
def __init__(self):
self._mru = []
def mru(self, oid):
self._mru.append(oid)
class _Jar(object):
implements(IPersistentDataManager)
def __init__(self):
self._loaded = []
self._registered = []
self._cache = _Cache()
def setstate(self, obj):
self._loaded.append(obj._p_oid)
def register(self, obj):
self._registered.append(obj._p_oid)
return _Jar()
def _makeOneWithJar(self, klass=None):
from persistent.pyPersistence import _makeOctets
OID = _makeOctets('\x01' * 8)
if klass is not None:
inst = klass()
else:
inst = self._makeOne()
jar = self._makeJar()
inst._p_jar = jar
inst._p_oid = OID
return inst, jar, OID
def test_class_conforms_to_IPersistent(self):
from zope.interface.verify import verifyClass
from persistent.interfaces import IPersistent
verifyClass(IPersistent, self._getTargetClass())
def test_instance_conforms_to_IPersistent(self):
from zope.interface.verify import verifyObject
from persistent.interfaces import IPersistent
verifyObject(IPersistent, self._makeOne())
def test_ctor(self):
from persistent.pyPersistence import _INITIAL_SERIAL
inst = self._makeOne()
self.assertEqual(inst._p_jar, None)
self.assertEqual(inst._p_oid, None)
self.assertEqual(inst._p_serial, _INITIAL_SERIAL)
self.assertEqual(inst._p_changed, None)
self.assertEqual(inst._p_sticky, False)
def test_assign_p_jar_w_invalid_jar(self):
inst = self._makeOne()
def _test():
inst._p_jar = object()
self.assertRaises(ValueError, _test)
def test_assign_p_jar_w_new_jar(self):
inst = self._makeOne()
inst._p_jar = self._makeJar()
jar = self._makeJar()
def _test():
inst._p_jar = jar
self.assertRaises(ValueError, _test)
def test_assign_p_jar_w_valid_jar(self):
jar = self._makeJar()
inst = self._makeOne()
inst._p_jar = jar
self.failUnless(inst._p_jar is jar)
inst._p_jar = jar # reassign only to same DM
def test_assign_p_oid_w_invalid_oid(self):
inst = self._makeOne()
def _test():
inst._p_oid = object()
self.assertRaises(ValueError, _test)
def test_assign_p_oid_w_valid_oid(self):
from persistent.pyPersistence import _makeOctets
OID = _makeOctets('\x01' * 8)
inst = self._makeOne()
inst._p_oid = OID
self.assertEqual(inst._p_oid, OID)
inst._p_oid = OID # reassign only same OID
def test_assign_p_oid_w_new_oid_wo_jar(self):
from persistent.pyPersistence import _makeOctets
OID1 = _makeOctets('\x01' * 8)
OID2 = _makeOctets('\x02' * 8)
inst = self._makeOne()
inst._p_oid = OID1
inst._p_oid = OID2
self.assertEqual(inst._p_oid, OID2)
def test_assign_p_oid_w_new_oid_w_jar(self):
from persistent.pyPersistence import _makeOctets
OID1 = _makeOctets('\x01' * 8)
OID2 = _makeOctets('\x02' * 8)
inst = self._makeOne()
inst._p_oid = OID1
inst._p_jar = self._makeJar()
def _test():
inst._p_oid = OID2
self.assertRaises(ValueError, _test)
def test_delete_p_oid_wo_jar(self):
from persistent.pyPersistence import _makeOctets
OID = _makeOctets('\x01' * 8)
inst = self._makeOne()
inst._p_oid = OID
del inst._p_oid
self.assertEqual(inst._p_oid, None)
def test_delete_p_oid_w_jar(self):
from persistent.pyPersistence import _makeOctets
OID = _makeOctets('\x01' * 8)
inst = self._makeOne()
inst._p_oid = OID
inst._p_jar = self._makeJar()
def _test():
del inst._p_oid
self.assertRaises(ValueError, _test)
def test_assign_p_serial_w_invalid_type(self):
inst = self._makeOne()
def _test():
inst._p_serial = object()
self.assertRaises(ValueError, _test)
def test_assign_p_serial_too_short(self):
inst = self._makeOne()
def _test():
inst._p_serial = '\x01\x02\x03'
self.assertRaises(ValueError, _test)
def test_assign_p_serial_too_long(self):
inst = self._makeOne()
def _test():
inst._p_serial = '\x01\x02\x03' * 3
self.assertRaises(ValueError, _test)
def test_assign_p_serial_w_valid_serial(self):
from persistent.pyPersistence import _makeOctets
from persistent.pyPersistence import _INITIAL_SERIAL
SERIAL = _makeOctets('\x01' * 8)
inst = self._makeOne()
inst._p_serial = SERIAL
self.assertEqual(inst._p_serial, SERIAL)
inst._p_serial = None
self.assertEqual(inst._p_serial, _INITIAL_SERIAL)
def test_delete_p_serial(self):
from persistent.pyPersistence import _makeOctets
from persistent.pyPersistence import _INITIAL_SERIAL
SERIAL = _makeOctets('\x01' * 8)
inst = self._makeOne()
inst._p_serial = SERIAL
self.assertEqual(inst._p_serial, SERIAL)
del inst._p_serial
self.assertEqual(inst._p_serial, _INITIAL_SERIAL)
def test_query_p_changed(self):
inst = self._makeOne()
self.assertEqual(inst._p_changed, None)
inst._p_changed = True
self.assertEqual(inst._p_changed, True)
inst._p_changed = False
self.assertEqual(inst._p_changed, False)
def test_assign_p_changed_none_from_new(self):
inst = self._makeOne()
inst._p_changed = None
self.assertEqual(inst._p_status, 'new')
def test_assign_p_changed_true_from_new(self):
inst = self._makeOne()
inst._p_changed = True
self.assertEqual(inst._p_status, 'unsaved')
def test_assign_p_changed_false_from_new(self):
inst = self._makeOne()
inst._p_changed = False # activates
self.assertEqual(inst._p_status, 'saved')
def test_assign_p_changed_none_from_unsaved(self):
inst = self._makeOne()
inst._p_changed = True
inst._p_changed = None
# can't transition 'unsaved' -> 'new'
self.assertEqual(inst._p_status, 'unsaved')
def test_assign_p_changed_true_from_unsaved(self):
inst = self._makeOne()
inst._p_changed = True
inst._p_changed = True
self.assertEqual(inst._p_status, 'unsaved')
def test_assign_p_changed_false_from_unsaved(self):
inst = self._makeOne()
inst._p_changed = True
inst._p_changed = False
self.assertEqual(inst._p_status, 'saved')
def test_assign_p_changed_none_from_ghost(self):
inst, jar, OID = self._makeOneWithJar()
inst._p_changed = None
self.assertEqual(inst._p_status, 'ghost')
self.assertEqual(list(jar._loaded), [])
self.assertEqual(list(jar._registered), [])
def test_assign_p_changed_true_from_ghost(self):
inst, jar, OID = self._makeOneWithJar()
inst._p_changed = True
self.assertEqual(inst._p_status, 'changed')
self.assertEqual(list(jar._loaded), [OID])
self.assertEqual(list(jar._registered), [OID])
def test_assign_p_changed_false_from_ghost(self):
inst, jar, OID = self._makeOneWithJar()
inst._p_changed = False
self.assertEqual(inst._p_status, 'saved')
self.assertEqual(list(jar._loaded), [OID])
self.assertEqual(list(jar._registered), [])
def test_assign_p_changed_none_from_saved(self):
inst, jar, OID = self._makeOneWithJar()
inst._p_activate()
jar._loaded = []
inst._p_changed = None
self.assertEqual(inst._p_status, 'ghost')
self.assertEqual(list(jar._loaded), [])
self.assertEqual(list(jar._registered), [])
def test_assign_p_changed_true_from_saved(self):
inst, jar, OID = self._makeOneWithJar()
inst._p_activate()
inst._p_changed = True
self.assertEqual(inst._p_status, 'changed')
self.assertEqual(list(jar._loaded), [OID])
self.assertEqual(list(jar._registered), [OID])
def test_assign_p_changed_false_from_saved(self):
inst, jar, OID = self._makeOneWithJar()
inst._p_activate()
jar._loaded = []
inst._p_changed = False
self.assertEqual(inst._p_status, 'saved')
self.assertEqual(list(jar._loaded), [])
self.assertEqual(list(jar._registered), [])
def test_assign_p_changed_none_from_changed(self):
inst, jar, OID = self._makeOneWithJar()
inst._p_activate()
inst._p_changed = True
jar._loaded = []
jar._registered = []
inst._p_changed = None
# assigning None is ignored when dirty
self.assertEqual(inst._p_status, 'changed')
self.assertEqual(list(jar._loaded), [])
self.assertEqual(list(jar._registered), [])
def test_assign_p_changed_true_from_changed(self):
inst, jar, OID = self._makeOneWithJar()
inst._p_activate()
inst._p_changed = True
jar._loaded = []
jar._registered = []
inst._p_changed = True
self.assertEqual(inst._p_status, 'changed')
self.assertEqual(list(jar._loaded), [])
self.assertEqual(list(jar._registered), [])
def test_assign_p_changed_false_from_changed(self):
inst, jar, OID = self._makeOneWithJar()
inst._p_activate()
inst._p_changed = True
jar._loaded = []
jar._registered = []
inst._p_changed = False
self.assertEqual(inst._p_status, 'saved')
self.assertEqual(list(jar._loaded), [])
self.assertEqual(list(jar._registered), [])
def test_assign_p_changed_none_when_sticky(self):
inst, jar, OID = self._makeOneWithJar()
inst._p_changed = False
inst._p_sticky = True
def _test():
inst._p_changed = None
self.assertRaises(ValueError, _test)
def test_delete_p_changed_from_new(self):
inst = self._makeOne()
del inst._p_changed
self.assertEqual(inst._p_status, 'new')
def test_delete_p_changed_from_unsaved(self):
inst = self._makeOne()
inst._p_changed = True
del inst._p_changed
self.assertEqual(inst._p_status, 'new')
def test_delete_p_changed_from_ghost(self):
inst, jar, OID = self._makeOneWithJar()
del inst._p_changed
self.assertEqual(inst._p_status, 'ghost')
self.assertEqual(list(jar._loaded), [])
self.assertEqual(list(jar._registered), [])
def test_delete_p_changed_from_saved(self):
inst, jar, OID = self._makeOneWithJar()
inst._p_activate()
jar._loaded = []
jar._registered = []
del inst._p_changed
self.assertEqual(inst._p_status, 'ghost')
self.assertEqual(list(jar._loaded), [])
self.assertEqual(list(jar._registered), [])
def test_delete_p_changed_from_changed(self):
inst, jar, OID = self._makeOneWithJar()
inst._p_activate()
inst._p_changed = True
jar._loaded = []
jar._registered = []
del inst._p_changed
self.assertEqual(inst._p_status, 'ghost')
self.assertEqual(list(jar._loaded), [])
self.assertEqual(list(jar._registered), [])
def test_delete_p_changed_when_sticky(self):
inst, jar, OID = self._makeOneWithJar()
inst._p_changed = False
inst._p_sticky = True
def _test():
del inst._p_changed
self.assertRaises(ValueError, _test)
def test_assign_p_sticky_true_when_ghost(self):
inst = self._makeOne()
def _test():
inst._p_sticky = True
self.assertRaises(ValueError, _test)
def test_assign_p_sticky_false_when_ghost(self):
inst = self._makeOne()
def _test():
inst._p_sticky = False
self.assertRaises(ValueError, _test)
def test_assign_p_sticky_true_non_ghost(self):
inst, jar, OID = self._makeOneWithJar()
inst._p_changed = False
inst._p_sticky = True
self.failUnless(inst._p_sticky)
def test_assign_p_sticky_false_non_ghost(self):
inst, jar, OID = self._makeOneWithJar()
inst._p_changed = False
inst._p_sticky = False
self.failIf(inst._p_sticky)
def test__p_status_new(self):
inst = self._makeOne()
self.assertEqual(inst._p_status, 'new')
def test__p_status_unsaved(self):
inst = self._makeOne()
inst._p_changed = True
self.assertEqual(inst._p_status, 'unsaved')
def test__p_status_ghost(self):
inst, jar, OID = self._makeOneWithJar()
self.assertEqual(inst._p_status, 'ghost')
def test__p_status_changed(self):
inst, jar, OID = self._makeOneWithJar()
inst._p_changed = True
self.assertEqual(inst._p_status, 'changed')
def test__p_status_changed_sticky(self):
# 'sticky' is not a state, but a separate flag.
inst, jar, OID = self._makeOneWithJar()
inst._p_changed = True
inst._p_sticky = True
self.assertEqual(inst._p_status, 'changed (sticky)')
def test__p_status_saved(self):
inst, jar, OID = self._makeOneWithJar()
inst._p_changed = False
self.assertEqual(inst._p_status, 'saved')
def test__p_status_saved_sticky(self):
# 'sticky' is not a state, but a separate flag.
inst, jar, OID = self._makeOneWithJar()
inst._p_changed = False
inst._p_sticky = True
self.assertEqual(inst._p_status, 'saved (sticky)')
def test__p_mtime_no_serial(self):
inst = self._makeOne()
self.assertEqual(inst._p_mtime, None)
def test__p_mtime_w_serial(self):
from persistent.timestamp import TimeStamp
WHEN_TUPLE = (2011, 2, 15, 13, 33, 27.5)
ts = TimeStamp(*WHEN_TUPLE)
inst, jar, OID = self._makeOneWithJar()
inst._p_serial = ts.raw()
self.assertEqual(inst._p_mtime, ts.timeTime())
def test__p_state_new(self):
inst = self._makeOne()
self.assertEqual(inst._p_state, 0)
def test__p_state_unsaved(self):
inst = self._makeOne()
inst._p_changed = True
self.assertEqual(inst._p_state, 0)
def test__p_state_ghost(self):
inst, jar, OID = self._makeOneWithJar()
self.assertEqual(inst._p_state, -1)
def test__p_state_changed(self):
inst, jar, OID = self._makeOneWithJar()
inst._p_changed = True
self.assertEqual(inst._p_state, 1)
def test__p_state_changed_sticky(self):
# 'sticky' is not a state, but a separate flag.
inst, jar, OID = self._makeOneWithJar()
inst._p_changed = True
inst._p_sticky = True
self.assertEqual(inst._p_state, 2)
def test__p_state_saved(self):
inst, jar, OID = self._makeOneWithJar()
inst._p_changed = False
self.assertEqual(inst._p_state, 0)
def test__p_state_saved_sticky(self):
# 'sticky' is not a state, but a separate flag.
inst, jar, OID = self._makeOneWithJar()
inst._p_changed = False
inst._p_sticky = True
self.assertEqual(inst._p_state, 2)
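# Summary of the numeric _p_state values pinned down by the tests above:
# 0 for 'new', 'unsaved', and 'saved'; -1 for 'ghost'; 1 for 'changed'; and
# 2 for any non-ghost state with the separate sticky flag set.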
def test_query_p_estimated_size(self):
inst = self._makeOne()
self.assertEqual(inst._p_estimated_size, 0)
def test_assign_p_estimated_size(self):
# XXX at the moment, we don't store this value.
inst = self._makeOne()
inst._p_estimated_size = 123
self.assertEqual(inst._p_estimated_size, 0)
def test___getattribute___p__names(self):
NAMES = ['_p_jar',
'_p_oid',
'_p_changed',
'_p_serial',
'_p_mtime',
'_p_state',
'_p_estimated_size',
'_p_sticky',
'_p_status',
]
inst, jar, OID = self._makeOneWithJar()
jar._cache._mru = []
for name in NAMES:
getattr(inst, name)
self.assertEqual(jar._cache._mru, [])
def test___getattribute__special_name(self):
from persistent.pyPersistence import SPECIAL_NAMES
inst, jar, OID = self._makeOneWithJar()
jar._cache._mru = []
for name in SPECIAL_NAMES:
getattr(inst, name, None)
self.assertEqual(jar._cache._mru, [])
def test___getattribute__normal_name_from_new(self):
class Derived(self._getTargetClass()):
normal = 'value'
inst = Derived()
self.assertEqual(getattr(inst, 'normal', None), 'value')
def test___getattribute__normal_name_from_unsaved(self):
class Derived(self._getTargetClass()):
normal = 'value'
inst = Derived()
inst._p_changed = True
self.assertEqual(getattr(inst, 'normal', None), 'value')
def test___getattribute__normal_name_from_ghost(self):
class Derived(self._getTargetClass()):
normal = 'value'
inst, jar, OID = self._makeOneWithJar(Derived)
jar._cache._mru = []
self.assertEqual(getattr(inst, 'normal', None), 'value')
self.assertEqual(jar._cache._mru, [OID])
def test___getattribute__normal_name_from_saved(self):
class Derived(self._getTargetClass()):
normal = 'value'
inst, jar, OID = self._makeOneWithJar(Derived)
inst._p_changed = False
jar._cache._mru = []
self.assertEqual(getattr(inst, 'normal', None), 'value')
self.assertEqual(jar._cache._mru, [OID])
def test___getattribute__normal_name_from_changed(self):
class Derived(self._getTargetClass()):
normal = 'value'
inst, jar, OID = self._makeOneWithJar(Derived)
inst._p_changed = True
jar._cache._mru = []
self.assertEqual(getattr(inst, 'normal', None), 'value')
self.assertEqual(jar._cache._mru, [OID])
def test___setattr___p__names(self):
from persistent.pyPersistence import _makeOctets
SERIAL = _makeOctets('\x01' * 8)
inst, jar, OID = self._makeOneWithJar()
NAMES = [('_p_jar', jar),
('_p_oid', OID),
('_p_changed', False),
('_p_serial', SERIAL),
('_p_estimated_size', 0),
('_p_sticky', False),
]
jar._cache._mru = []
for name, value in NAMES:
setattr(inst, name, value)
self.assertEqual(jar._cache._mru, [])
def test___setattr__normal_name_from_new(self):
class Derived(self._getTargetClass()):
normal = 'before'
inst = Derived()
setattr(inst, 'normal', 'after')
self.assertEqual(getattr(inst, 'normal', None), 'after')
def test___setattr__normal_name_from_unsaved(self):
class Derived(self._getTargetClass()):
normal = 'before'
inst = Derived()
inst._p_changed = True
setattr(inst, 'normal', 'after')
self.assertEqual(getattr(inst, 'normal', None), 'after')
def test___setattr__normal_name_from_ghost(self):
class Derived(self._getTargetClass()):
normal = 'before'
inst, jar, OID = self._makeOneWithJar(Derived)
jar._cache._mru = []
setattr(inst, 'normal', 'after')
self.assertEqual(jar._cache._mru, [OID])
self.assertEqual(jar._registered, [OID])
self.assertEqual(getattr(inst, 'normal', None), 'after')
def test___setattr__normal_name_from_saved(self):
class Derived(self._getTargetClass()):
normal = 'before'
inst, jar, OID = self._makeOneWithJar(Derived)
inst._p_changed = False
jar._cache._mru = []
setattr(inst, 'normal', 'after')
self.assertEqual(jar._cache._mru, [OID])
self.assertEqual(jar._registered, [OID])
self.assertEqual(getattr(inst, 'normal', None), 'after')
def test___setattr__normal_name_from_changed(self):
class Derived(self._getTargetClass()):
normal = 'before'
inst, jar, OID = self._makeOneWithJar(Derived)
inst._p_changed = True
jar._cache._mru = []
jar._registered = []
setattr(inst, 'normal', 'after')
self.assertEqual(jar._cache._mru, [OID])
self.assertEqual(jar._registered, [])
self.assertEqual(getattr(inst, 'normal', None), 'after')
def test___delattr___p__names(self):
NAMES = ['_p_changed',
'_p_serial',
]
inst, jar, OID = self._makeOneWithJar()
jar._cache._mru = []
jar._registered = []
for name in NAMES:
delattr(inst, name)
self.assertEqual(jar._cache._mru, [])
self.assertEqual(jar._registered, [])
def test___delattr__normal_name_from_new(self):
class Derived(self._getTargetClass()):
normal = 'before'
def __init__(self):
self.__dict__['normal'] = 'after'
inst = Derived()
delattr(inst, 'normal')
self.assertEqual(getattr(inst, 'normal', None), 'before')
def test___delattr__normal_name_from_unsaved(self):
class Derived(self._getTargetClass()):
normal = 'before'
def __init__(self):
self.__dict__['normal'] = 'after'
inst = Derived()
inst._p_changed = True
delattr(inst, 'normal')
self.assertEqual(getattr(inst, 'normal', None), 'before')
def test___delattr__normal_name_from_ghost(self):
class Derived(self._getTargetClass()):
normal = 'before'
def __init__(self):
self.__dict__['normal'] = 'after'
inst, jar, OID = self._makeOneWithJar(Derived)
jar._cache._mru = []
jar._registered = []
delattr(inst, 'normal')
self.assertEqual(jar._cache._mru, [OID])
self.assertEqual(jar._registered, [OID])
self.assertEqual(getattr(inst, 'normal', None), 'before')
def test___delattr__normal_name_from_saved(self):
class Derived(self._getTargetClass()):
normal = 'before'
def __init__(self):
self.__dict__['normal'] = 'after'
inst, jar, OID = self._makeOneWithJar(Derived)
inst._p_changed = False
jar._cache._mru = []
jar._registered = []
delattr(inst, 'normal')
self.assertEqual(jar._cache._mru, [OID])
self.assertEqual(jar._registered, [OID])
self.assertEqual(getattr(inst, 'normal', None), 'before')
def test___delattr__normal_name_from_changed(self):
class Derived(self._getTargetClass()):
normal = 'before'
def __init__(self):
self.__dict__['normal'] = 'after'
inst, jar, OID = self._makeOneWithJar(Derived)
inst._p_changed = True
jar._cache._mru = []
jar._registered = []
delattr(inst, 'normal')
self.assertEqual(jar._cache._mru, [OID])
self.assertEqual(jar._registered, [])
self.assertEqual(getattr(inst, 'normal', None), 'before')
def test___getstate__(self):
inst = self._makeOne()
self.assertEqual(inst.__getstate__(), ())
def test___getstate___derived_w_dict(self):
class Derived(self._getTargetClass()):
pass
inst = Derived()
inst.foo = 'bar'
inst._p_baz = 'bam'
inst._v_qux = 'spam'
self.assertEqual(inst.__getstate__(), {'foo': 'bar'})
def test___setstate___empty(self):
inst = self._makeOne()
inst.__setstate__(()) # doesn't raise, but doesn't change anything
def test___setstate___nonempty(self):
from persistent.pyPersistence import _INITIAL_SERIAL
inst = self._makeOne()
self.assertRaises(ValueError, inst.__setstate__, {'bogus': 1})
self.assertEqual(inst._p_jar, None)
self.assertEqual(inst._p_oid, None)
self.assertEqual(inst._p_serial, _INITIAL_SERIAL)
self.assertEqual(inst._p_changed, None)
self.assertEqual(inst._p_sticky, False)
def test___setstate___nonempty_derived_w_dict(self):
class Derived(self._getTargetClass()):
pass
inst = Derived()
inst.foo = 'bar'
inst.__setstate__({'baz': 'bam'})
self.assertEqual(inst.__dict__, {'baz': 'bam'})
def test___reduce__(self):
from copy_reg import __newobj__
inst = self._makeOne()
first, second, third = inst.__reduce__()
self.failUnless(first is __newobj__)
self.assertEqual(second, (self._getTargetClass(),))
self.assertEqual(third, ())
def test___reduce__w_subclass_having_getstate(self):
from copy_reg import __newobj__
class Derived(self._getTargetClass()):
def __getstate__(self):
return {}
inst = Derived()
first, second, third = inst.__reduce__()
self.failUnless(first is __newobj__)
self.assertEqual(second, (Derived,))
self.assertEqual(third, {})
def test___reduce__w_subclass_having_gna_and_getstate(self):
from copy_reg import __newobj__
class Derived(self._getTargetClass()):
def __getnewargs__(self):
return ('a', 'b')
def __getstate__(self):
return {'foo': 'bar'}
inst = Derived()
first, second, third = inst.__reduce__()
self.failUnless(first is __newobj__)
self.assertEqual(second, (Derived, 'a', 'b'))
self.assertEqual(third, {'foo': 'bar'})
def test__p_activate_from_new(self):
inst = self._makeOne()
inst._p_activate()
self.assertEqual(inst._p_status, 'saved')
def test__p_activate_from_saved(self):
inst = self._makeOne()
inst._p_changed = False
inst._p_activate() # noop from 'saved' state
self.assertEqual(inst._p_status, 'saved')
def test__p_activate_from_unsaved(self):
inst = self._makeOne()
inst._p_changed = True
inst._p_activate() # noop from 'unsaved' state
self.assertEqual(inst._p_status, 'unsaved')
def test__p_deactivate_from_new(self):
inst = self._makeOne()
inst._p_deactivate()
self.assertEqual(inst._p_status, 'new')
def test__p_deactivate_from_new_w_dict(self):
class Derived(self._getTargetClass()):
normal = 'before'
def __init__(self):
self.__dict__['normal'] = 'after'
inst = Derived()
inst._p_deactivate()
self.assertEqual(inst._p_status, 'new')
self.assertEqual(inst.__dict__, {'normal': 'after'})
def test__p_deactivate_from_unsaved(self):
inst = self._makeOne()
inst._p_changed = True
inst._p_deactivate()
# can't transition 'unsaved' -> 'new'
self.assertEqual(inst._p_status, 'unsaved')
def test__p_deactivate_from_unsaved_w_dict(self):
class Derived(self._getTargetClass()):
normal = 'before'
def __init__(self):
self.__dict__['normal'] = 'after'
inst = Derived()
inst._p_changed = True
inst._p_deactivate()
# can't transition 'unsaved' -> 'new'
self.assertEqual(inst._p_status, 'unsaved')
self.assertEqual(inst.__dict__, {'normal': 'after'})
def test__p_deactivate_from_ghost(self):
inst, jar, OID = self._makeOneWithJar()
inst._p_deactivate()
self.assertEqual(inst._p_status, 'ghost')
self.assertEqual(list(jar._loaded), [])
self.assertEqual(list(jar._registered), [])
def test__p_deactivate_from_saved(self):
inst, jar, OID = self._makeOneWithJar()
inst._p_activate()
jar._loaded = []
inst._p_deactivate()
self.assertEqual(inst._p_status, 'ghost')
self.assertEqual(list(jar._loaded), [])
self.assertEqual(list(jar._registered), [])
def test__p_deactivate_from_saved_w_dict(self):
class Derived(self._getTargetClass()):
normal = 'before'
def __init__(self):
self.__dict__['normal'] = 'after'
inst, jar, OID = self._makeOneWithJar(Derived)
inst._p_activate()
jar._loaded = []
inst._p_deactivate()
self.assertEqual(inst._p_status, 'ghost')
self.assertEqual(inst.__dict__, {})
self.assertEqual(list(jar._loaded), [])
self.assertEqual(list(jar._registered), [])
def test__p_deactivate_from_changed_w_dict(self):
class Derived(self._getTargetClass()):
normal = 'before'
inst, jar, OID = self._makeOneWithJar(Derived)
inst.normal = 'after'
jar._loaded = []
jar._registered = []
inst._p_deactivate()
# deactivation is ignored while the object is dirty
self.assertEqual(inst._p_status, 'changed')
self.assertEqual(inst.__dict__, {'normal': 'after'})
self.assertEqual(list(jar._loaded), [])
self.assertEqual(list(jar._registered), [])
def test__p_deactivate_from_changed(self):
inst, jar, OID = self._makeOneWithJar()
inst._p_activate()
inst._p_changed = True
jar._loaded = []
jar._registered = []
inst._p_deactivate()
# deactivation is ignored while the object is dirty
self.assertEqual(inst._p_status, 'changed')
self.assertEqual(list(jar._loaded), [])
self.assertEqual(list(jar._registered), [])
def test__p_deactivate_when_sticky(self):
inst, jar, OID = self._makeOneWithJar()
inst._p_changed = False
inst._p_sticky = True
self.assertRaises(ValueError, inst._p_deactivate)
def test__p_invalidate_from_new(self):
inst = self._makeOne()
inst._p_invalidate()
self.assertEqual(inst._p_status, 'new')
def test__p_invalidate_from_new_w_dict(self):
class Derived(self._getTargetClass()):
normal = 'before'
def __init__(self):
self.__dict__['normal'] = 'after'
inst = Derived()
inst._p_invalidate()
self.assertEqual(inst._p_status, 'new')
self.assertEqual(inst.__dict__, {})
def test__p_invalidate_from_unsaved(self):
inst = self._makeOne()
inst._p_changed = True
inst._p_invalidate()
self.assertEqual(inst._p_status, 'new')
def test__p_invalidate_from_unsaved_w_dict(self):
class Derived(self._getTargetClass()):
normal = 'before'
def __init__(self):
self.__dict__['normal'] = 'after'
inst = Derived()
inst._p_changed = True
inst._p_invalidate()
self.assertEqual(inst._p_status, 'new')
self.assertEqual(inst.__dict__, {})
def test__p_invalidate_from_ghost(self):
inst, jar, OID = self._makeOneWithJar()
inst._p_invalidate()
self.assertEqual(inst._p_status, 'ghost')
self.assertEqual(list(jar._loaded), [])
self.assertEqual(list(jar._registered), [])
def test__p_invalidate_from_saved(self):
inst, jar, OID = self._makeOneWithJar()
inst._p_activate()
jar._loaded = []
jar._registered = []
inst._p_invalidate()
self.assertEqual(inst._p_status, 'ghost')
self.assertEqual(list(jar._loaded), [])
self.assertEqual(list(jar._registered), [])
def test__p_invalidate_from_saved_w_dict(self):
class Derived(self._getTargetClass()):
normal = 'before'
def __init__(self):
self.__dict__['normal'] = 'after'
inst, jar, OID = self._makeOneWithJar(Derived)
inst._p_activate()
jar._loaded = []
jar._registered = []
inst._p_invalidate()
self.assertEqual(inst._p_status, 'ghost')
self.assertEqual(inst.__dict__, {})
self.assertEqual(list(jar._loaded), [])
self.assertEqual(list(jar._registered), [])
def test__p_invalidate_from_changed(self):
inst, jar, OID = self._makeOneWithJar()
inst._p_activate()
inst._p_changed = True
jar._loaded = []
jar._registered = []
inst._p_invalidate()
self.assertEqual(inst._p_status, 'ghost')
self.assertEqual(list(jar._loaded), [])
self.assertEqual(list(jar._registered), [])
def test__p_invalidate_from_changed_w_dict(self):
class Derived(self._getTargetClass()):
normal = 'before'
def __init__(self):
self.__dict__['normal'] = 'after'
inst, jar, OID = self._makeOneWithJar(Derived)
inst._p_activate()
inst._p_changed = True
jar._loaded = []
jar._registered = []
inst._p_invalidate()
self.assertEqual(inst._p_status, 'ghost')
self.assertEqual(inst.__dict__, {})
self.assertEqual(list(jar._loaded), [])
self.assertEqual(list(jar._registered), [])
def test__p_invalidate_when_sticky(self):
inst, jar, OID = self._makeOneWithJar()
inst._p_changed = False
inst._p_sticky = True
self.assertRaises(ValueError, inst._p_invalidate)
def test__p_getattr_w__p__names(self):
NAMES = ['_p_jar',
'_p_oid',
'_p_changed',
'_p_serial',
'_p_mtime',
'_p_state',
'_p_estimated_size',
'_p_sticky',
'_p_status',
]
inst, jar, OID = self._makeOneWithJar()
for name in NAMES:
self.failUnless(inst._p_getattr(name))
self.assertEqual(inst._p_status, 'ghost')
self.assertEqual(list(jar._loaded), [])
self.assertEqual(list(jar._cache._mru), [])
def test__p_getattr_w_special_names(self):
from persistent.pyPersistence import SPECIAL_NAMES
inst, jar, OID = self._makeOneWithJar()
for name in SPECIAL_NAMES:
self.failUnless(inst._p_getattr(name))
self.assertEqual(inst._p_status, 'ghost')
self.assertEqual(list(jar._loaded), [])
self.assertEqual(list(jar._cache._mru), [])
def test__p_getattr_w_normal_name(self):
inst, jar, OID = self._makeOneWithJar()
self.failIf(inst._p_getattr('normal'))
self.assertEqual(inst._p_status, 'saved')
self.assertEqual(list(jar._loaded), [OID])
self.assertEqual(list(jar._cache._mru), [OID])
def test__p_setattr_w__p__name(self):
from persistent.pyPersistence import _makeOctets
SERIAL = _makeOctets('\x01' * 8)
inst, jar, OID = self._makeOneWithJar()
self.failUnless(inst._p_setattr('_p_serial', SERIAL))
self.assertEqual(inst._p_status, 'ghost')
self.assertEqual(inst._p_serial, SERIAL)
self.assertEqual(list(jar._loaded), [])
self.assertEqual(list(jar._cache._mru), [])
def test__p_setattr_w_normal_name(self):
inst, jar, OID = self._makeOneWithJar()
self.failIf(inst._p_setattr('normal', 'value'))
# _p_setattr doesn't do the actual write for normal names
self.assertEqual(inst._p_status, 'saved')
self.assertEqual(list(jar._loaded), [OID])
self.assertEqual(list(jar._cache._mru), [OID])
def test__p_delattr_w__p__names(self):
NAMES = ['_p_changed',
'_p_serial',
]
inst, jar, OID = self._makeOneWithJar()
inst._p_changed = True
jar._loaded = []
for name in NAMES:
self.failUnless(inst._p_delattr(name))
self.assertEqual(inst._p_status, 'ghost')
self.assertEqual(inst._p_changed, None)
self.assertEqual(list(jar._loaded), [])
self.assertEqual(list(jar._cache._mru), [])
def test__p_delattr_w_normal_name(self):
class Derived(self._getTargetClass()):
normal = 'before'
def __init__(self):
self.__dict__['normal'] = 'after'
inst, jar, OID = self._makeOneWithJar(Derived)
self.failIf(inst._p_delattr('normal'))
# _p_delattr doesn't do the actual delete for normal names
self.assertEqual(inst._p_status, 'saved')
self.assertEqual(list(jar._loaded), [OID])
self.assertEqual(list(jar._cache._mru), [OID])
##############################################################################
#
# Copyright (c) 2011 Zope Foundation and Contributors.
# All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
##############################################################################
import unittest
class TimeStampTests(unittest.TestCase):
def _getTargetClass(self):
from persistent.timestamp import TimeStamp
return TimeStamp
def _makeOne(self, *args, **kw):
return self._getTargetClass()(*args, **kw)
def test_ctor_invalid_arglist(self):
BAD_ARGS = [(),
(1,),
(1, 2),
(1, 2, 3),
(1, 2, 3, 4),
(1, 2, 3, 4, 5),
('1', '2', '3', '4', '5', '6'),
(1, 2, 3, 4, 5, 6, 7),
]
for args in BAD_ARGS:
self.assertRaises((TypeError, ValueError), self._makeOne, *args)
def test_ctor_from_string(self):
from persistent.timestamp import _makeOctets
from persistent.timestamp import _makeUTC
ZERO = _makeUTC(1900, 1, 1, 0, 0, 0)
EPOCH = _makeUTC(1970, 1, 1, 0, 0, 0)
DELTA = ZERO - EPOCH
DELTA_SECS = DELTA.days * 86400 + DELTA.seconds
SERIAL = _makeOctets('\x00' * 8)
ts = self._makeOne(SERIAL)
self.assertEqual(ts.raw(), SERIAL)
self.assertEqual(ts.year(), 1900)
self.assertEqual(ts.month(), 1)
self.assertEqual(ts.day(), 1)
self.assertEqual(ts.hour(), 0)
self.assertEqual(ts.minute(), 0)
self.assertEqual(ts.second(), 0.0)
self.assertEqual(ts.timeTime(), DELTA_SECS)
def test_ctor_from_elements(self):
from persistent.timestamp import _makeOctets
from persistent.timestamp import _makeUTC
ZERO = _makeUTC(1900, 1, 1, 0, 0, 0)
EPOCH = _makeUTC(1970, 1, 1, 0, 0, 0)
DELTA = ZERO - EPOCH
DELTA_SECS = DELTA.days * 86400 + DELTA.seconds
SERIAL = _makeOctets('\x00' * 8)
ts = self._makeOne(1900, 1, 1, 0, 0, 0.0)
self.assertEqual(ts.raw(), SERIAL)
self.assertEqual(ts.year(), 1900)
self.assertEqual(ts.month(), 1)
self.assertEqual(ts.day(), 1)
self.assertEqual(ts.hour(), 0)
self.assertEqual(ts.minute(), 0)
self.assertEqual(ts.second(), 0.0)
self.assertEqual(ts.timeTime(), DELTA_SECS)
def test_laterThan_self_is_earlier(self):
from persistent.timestamp import _makeOctets
SERIAL1 = _makeOctets('\x01' * 8)
SERIAL2 = _makeOctets('\x02' * 8)
ts1 = self._makeOne(SERIAL1)
ts2 = self._makeOne(SERIAL2)
later = ts1.laterThan(ts2)
self.assertEqual(later.raw(), _makeOctets('\x02' * 7 + '\x03'))
def test_laterThan_self_is_later(self):
from persistent.timestamp import _makeOctets
SERIAL1 = _makeOctets('\x01' * 8)
SERIAL2 = _makeOctets('\x02' * 8)
ts1 = self._makeOne(SERIAL1)
ts2 = self._makeOne(SERIAL2)
later = ts2.laterThan(ts1)
self.failUnless(later is ts2)
##############################################################################
#
# Copyright (c) 2003 Zope Foundation and Contributors.
# All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
##############################################################################
import unittest
import os
if os.environ.get('USE_ZOPE_TESTING_DOCTEST'):
from zope.testing.doctest import DocTestSuite
else:
from doctest import DocTestSuite
def test_suite():
return DocTestSuite('persistent.wref')
if __name__ == '__main__':
unittest.main()
class ResettingJar(object):
"""Testing stub for _p_jar attribute.
"""
def __init__(self):
from persistent import PickleCache # XXX stub it!
from persistent.interfaces import IPersistentDataManager
from zope.interface import directlyProvides
self.cache = self._cache = PickleCache(self)
self.oid = 1
self.registered = {}
directlyProvides(self, IPersistentDataManager)
def add(self, obj):
import struct
obj._p_oid = struct.pack(">Q", self.oid)
self.oid += 1
obj._p_jar = self
self.cache[obj._p_oid] = obj
def close(self):
pass
# the following methods must be implemented to be a jar
def setklassstate(self):
# I don't know what this method does, but the pickle cache
# constructor calls it.
pass
def register(self, obj):
self.registered[obj] = 1
def setstate(self, obj):
# Trivial setstate() implementation that just re-initializes
# the object. This isn't what setstate() is supposed to do,
# but it suffices for the tests.
obj.__class__.__init__(obj)
class RememberingJar(object):
"""Testing stub for _p_jar attribute.
"""
def __init__(self):
from persistent import PickleCache # XXX stub it!
self.cache = PickleCache(self)
self.oid = 1
self.registered = {}
def add(self, obj):
import struct
obj._p_oid = struct.pack(">Q", self.oid)
self.oid += 1
obj._p_jar = self
self.cache[obj._p_oid] = obj
# Remember object's state for later.
self.obj = obj
self.remembered = obj.__getstate__()
def close(self):
pass
def fake_commit(self):
self.remembered = self.obj.__getstate__()
self.obj._p_changed = 0
# the following methods must be implemented to be a jar
def setklassstate(self):
# I don't know what this method does, but the pickle cache
# constructor calls it.
pass
def register(self, obj):
self.registered[obj] = 1
def setstate(self, obj):
# Trivial setstate() implementation that resets the object's
# state as of the time it was added to the jar.
# This isn't what setstate() is supposed to do,
# but it suffices for the tests.
obj.__setstate__(self.remembered)
##############################################################################
#
# Copyright (c) 2011 Zope Foundation and Contributors.
# All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
##############################################################################
__all__ = ('TimeStamp',)
import datetime
import math
import struct
import sys
if sys.version_info < (2, 6):
_RAWTYPE = str
else:
_RAWTYPE = bytes
def _makeOctets(s):
if sys.version_info < (2, 6,):
return str(s)
if sys.version_info < (3,):
return bytes(s)
return bytes(s, 'ascii')
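# _makeOctets normalizes 8-bit string construction across versions: plain
# str before Python 2.6, bytes (an alias for str) on 2.6/2.7, and
# ASCII-encoded bytes on Python 3.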
_ZERO = _makeOctets('\x00' * 8)
class _UTC(datetime.tzinfo):
def tzname(self, dt=None):
return 'UTC'
def utcoffset(self, when):
return datetime.timedelta(0, 0, 0)
def dst(self, dt=None):
return datetime.timedelta(0)
def fromutc(self, dt):
return dt
def _makeUTC(y, mo, d, h, mi, s):
usec, sec = math.modf(s)
sec = int(sec)
usec = int(usec * 1e6)
return datetime.datetime(y, mo, d, h, mi, sec, usec, tzinfo=_UTC())
_EPOCH = _makeUTC(1970, 1, 1, 0, 0, 0)
_SCONV = 60.0 / (1<<16) / (1<<16)
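# Each tick of the low 32-bit word of a raw timestamp is 60.0 / 2**32
# seconds: the minute is subdivided into 2**32 equal parts, and _SCONV
# converts between ticks and seconds.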
def _makeRaw(year, month, day, hour, minute, second):
a = (((year - 1900) * 12 + month - 1) * 31 + day - 1)
a = (a * 24 + hour) * 60 + minute
b = int(second / _SCONV)
return struct.pack('>II', a, b)
def _parseRaw(octets):
a, b = struct.unpack('>II', octets)
minute = a % 60
hour = a // 60 % 24
day = a // (60 * 24) % 31 + 1
month = a // (60 * 24 * 31) % 12 + 1
year = a // (60 * 24 * 31 * 12) + 1900
second = b * _SCONV
return (year, month, day, hour, minute, second)
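# Worked example (matching the all-zero serial used by the tests): for
# 1900-01-01 00:00:00.0 the minute word is
# ((((1900 - 1900) * 12 + (1 - 1)) * 31 + (1 - 1)) * 24 + 0) * 60 + 0 == 0
# and the tick word is int(0.0 / _SCONV) == 0, so the packed raw value is
# eight NUL octets.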
class TimeStamp(object):
__slots__ = ('_raw', '_elements')
def __init__(self, *args):
if len(args) == 1:
raw = args[0]
if not isinstance(raw, _RAWTYPE):
raise TypeError('Raw octets must be of type: %s' % _RAWTYPE)
if len(raw) != 8:
raise TypeError('Raw must be 8 octets')
self._raw = raw
self._elements = _parseRaw(raw)
elif len(args) == 6:
self._raw = _makeRaw(*args)
self._elements = args
else:
raise TypeError('Pass either a single 8-octet arg '
'or 5 integers and a float')
def raw(self):
return self._raw
def year(self):
return self._elements[0]
def month(self):
return self._elements[1]
def day(self):
return self._elements[2]
def hour(self):
return self._elements[3]
def minute(self):
return self._elements[4]
def second(self):
return self._elements[5]
def timeTime(self):
""" -> seconds since epoch, as a float.
"""
delta = _makeUTC(*self._elements) - _EPOCH
return delta.days * 86400.0 + delta.seconds
def laterThan(self, other):
""" Return a timestamp instance which is later than 'other'.
If self already qualifies, return self.
Otherwise, return a new instance one moment later than 'other'.
"""
if not isinstance(other, self.__class__):
raise ValueError()
if self._raw > other._raw:
return self
a, b = struct.unpack('>II', other._raw)
later = struct.pack('>II', a, b + 1)
return self.__class__(later)
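# If the C extension is importable, its TimeStamp replaces the pure-Python
# implementation defined above; otherwise the Python version stays bound to
# the name.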
try:
from persistent.TimeStamp import TimeStamp
except ImportError:
pass
##############################################################################
#
# Copyright (c) 2003 Zope Foundation and Contributors.
# All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
##############################################################################
"""ZODB-based persistent weakrefs
$Id$
"""
__docformat__ = "reStructuredText"
from persistent import Persistent
import transaction
WeakRefMarker = object()
class WeakRef(object):
"""Persistent weak references
Persistent weak references are used much like Python weak
references. The major difference is that you can't specify an
object to be called when the object is removed from the database.
Here's an example. We'll start by creating a persistent object and
a reference to it:
>>> import persistent, ZODB.tests.MinPO
>>> import ZODB.tests.util
>>> ob = ZODB.tests.MinPO.MinPO()
>>> ref = WeakRef(ob)
>>> ref() is ob
True
The hash of the ref is the same as the hash of the referenced object:
>>> hash(ref) == hash(ob)
True
Two refs to the same object are equal:
>>> WeakRef(ob) == ref
True
>>> ob2 = ZODB.tests.MinPO.MinPO(1)
>>> WeakRef(ob2) == ref
False
Let's save the reference and the referenced object in a database:
>>> db = ZODB.tests.util.DB()
>>> conn1 = db.open()
>>> conn1.root()['ob'] = ob
>>> conn1.root()['ref'] = ref
>>> transaction.commit()
If we open a new connection, we can use the reference:
>>> conn2 = db.open()
>>> conn2.root()['ref']() is conn2.root()['ob']
True
>>> hash(conn2.root()['ref']) == hash(conn2.root()['ob'])
True
But if we delete the referenced object and pack:
>>> del conn2.root()['ob']
>>> transaction.commit()
>>> ZODB.tests.util.pack(db)
And then look in a new connection:
>>> conn3 = db.open()
>>> conn3.root()['ob']
Traceback (most recent call last):
...
KeyError: 'ob'
Trying to dereference the reference returns None:
>>> conn3.root()['ref']()
Trying to get a hash raises a TypeError:
>>> hash(conn3.root()['ref'])
Traceback (most recent call last):
...
TypeError: Weakly-referenced object has gone away
Always explicitly close databases: :)
>>> db.close()
>>> del ob, ref, db, conn1, conn2, conn3
When multiple databases are in use, a weakref in one database may
point to an object in a different database. Let's create two new
databases to demonstrate this.
>>> dbA = ZODB.tests.util.DB(
... database_name = 'dbA',
... )
>>> dbB = ZODB.tests.util.DB(
... database_name = 'dbB',
... databases = dbA.databases,
... )
>>> connA1 = dbA.open()
>>> connB1 = connA1.get_connection('dbB')
Now create and add a new object and a weak reference, and add them
to different databases.
>>> ob = ZODB.tests.MinPO.MinPO()
>>> ref = WeakRef(ob)
>>> connA1.root()['ob'] = ob
>>> connA1.add(ob)
>>> connB1.root()['ref'] = ref
>>> transaction.commit()
After a successful commit, the reference should know the oid,
database name and connection of the object.
>>> ref.oid == ob._p_oid
True
>>> ref.database_name == 'dbA'
True
>>> ref.dm is ob._p_jar is connA1
True
If we open new connections, we should be able to use the reference.
>>> connA2 = dbA.open()
>>> connB2 = connA2.get_connection('dbB')
>>> ref2 = connB2.root()['ref']
>>> ob2 = connA2.root()['ob']
>>> ref2() is ob2
True
>>> ref2.oid == ob2._p_oid
True
>>> ref2.database_name == 'dbA'
True
>>> ref2.dm is ob2._p_jar is connA2
True
Always explicitly close databases: :)
>>> dbA.close()
>>> dbB.close()
"""
# We set _p_oid to a marker so that the serialization system can
# provide special handling of weakrefs.
_p_oid = WeakRefMarker
def __init__(self, ob):
self._v_ob = ob
self.oid = ob._p_oid
self.dm = ob._p_jar
if self.dm is not None:
self.database_name = self.dm.db().database_name
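# The '_v_' prefix marks the cached target as volatile by convention: it is
# not meant to survive serialization, which is why __call__ below falls back
# to re-fetching the object through dm and oid when _v_ob is missing.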
def __call__(self):
try:
return self._v_ob
except AttributeError:
try:
self._v_ob = self.dm[self.oid]
except (KeyError, AttributeError):
return None
return self._v_ob
def __hash__(self):
self = self()
if self is None:
raise TypeError('Weakly-referenced object has gone away')
return hash(self)
def __eq__(self, other):
self = self()
if self is None:
raise TypeError('Weakly-referenced object has gone away')
other = other()
if other is None:
raise TypeError('Weakly-referenced object has gone away')
return self == other
class PersistentWeakKeyDictionary(Persistent):
"""Persistent weak key dictionary
This is akin to WeakKeyDictionaries. Note, however, that removal
of items is extremely lazy. See below.
We'll start by creating a PersistentWeakKeyDictionary and adding
some persistent objects to it.
>>> d = PersistentWeakKeyDictionary()
>>> import ZODB.tests.util
>>> p1 = ZODB.tests.util.P('p1')
>>> p2 = ZODB.tests.util.P('p2')
>>> p3 = ZODB.tests.util.P('p3')
>>> d[p1] = 1
>>> d[p2] = 2
>>> d[p3] = 3
We'll create an extra persistent object that's not in the dict:
>>> p4 = ZODB.tests.util.P('p4')
Now we'll exercise iteration and item access:
>>> l = [(str(k), d[k], d.get(k)) for k in d]
>>> l.sort()
>>> l
[('P(p1)', 1, 1), ('P(p2)', 2, 2), ('P(p3)', 3, 3)]
And the containment operator:
>>> [p in d for p in [p1, p2, p3, p4]]
[True, True, True, False]
We can add the dict and the referenced objects to a database:
>>> db = ZODB.tests.util.DB()
>>> conn1 = db.open()
>>> conn1.root()['p1'] = p1
>>> conn1.root()['d'] = d
>>> conn1.root()['p2'] = p2
>>> conn1.root()['p3'] = p3
>>> transaction.commit()
And things still work, as before:
>>> l = [(str(k), d[k], d.get(k)) for k in d]
>>> l.sort()
>>> l
[('P(p1)', 1, 1), ('P(p2)', 2, 2), ('P(p3)', 3, 3)]
>>> [p in d for p in [p1, p2, p3, p4]]
[True, True, True, False]
Likewise, we can read the objects from another connection and
things still work.
>>> conn2 = db.open()
>>> d = conn2.root()['d']
>>> p1 = conn2.root()['p1']
>>> p2 = conn2.root()['p2']
>>> p3 = conn2.root()['p3']
>>> l = [(str(k), d[k], d.get(k)) for k in d]
>>> l.sort()
>>> l
[('P(p1)', 1, 1), ('P(p2)', 2, 2), ('P(p3)', 3, 3)]
>>> [p in d for p in [p1, p2, p3, p4]]
[True, True, True, False]
Now, we'll delete one of the objects from the database, but *not*
from the dictionary:
>>> del conn2.root()['p2']
>>> transaction.commit()
And pack the database, so that the no-longer referenced p2 is
actually removed from the database.
>>> ZODB.tests.util.pack(db)
Now if we access the dictionary in a new connection, it no longer
has p2:
>>> conn3 = db.open()
>>> d = conn3.root()['d']
>>> l = [(str(k), d[k], d.get(k)) for k in d]
>>> l.sort()
>>> l
[('P(p1)', 1, 1), ('P(p3)', 3, 3)]
It's worth noting that the versions of the dictionary in
conn1 and conn2 still have p2, because p2 is still in the caches
for those connections.
Always explicitly close databases: :)
>>> db.close()
"""
# TODO: It's expensive trying to load dead objects from the database.
# It would be helpful if the data manager/connection cached these.
def __init__(self, adict=None, **kwargs):
self.data = {}
if adict is not None:
keys = getattr(adict, "keys", None)
if keys is None:
adict = dict(adict)
self.update(adict)
if kwargs:
self.update(kwargs)
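# Pickling turns the data mapping into a list of (WeakRef, value)
# items; unpickling rebuilds the dict, silently dropping entries whose
# referent has gone away (k() is None). This is where the "extremely
# lazy" removal mentioned in the class docstring actually happens.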
def __getstate__(self):
state = Persistent.__getstate__(self)
state['data'] = state['data'].items()
return state
def __setstate__(self, state):
state['data'] = dict([
(k, v) for (k, v) in state['data']
if k() is not None
])
Persistent.__setstate__(self, state)
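# All mapping operations wrap the key in a WeakRef before touching
# self.data, so lookups hash and compare through the referenced object
# (see WeakRef.__hash__ and WeakRef.__eq__ above).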
def __setitem__(self, key, value):
self.data[WeakRef(key)] = value
def __getitem__(self, key):
return self.data[WeakRef(key)]
def __delitem__(self, key):
del self.data[WeakRef(key)]
def get(self, key, default=None):
"""D.get(k[, d]) -> D[k] if k in D, else d.
>>> import ZODB.tests.util
>>> key = ZODB.tests.util.P("key")
>>> missing = ZODB.tests.util.P("missing")
>>> d = PersistentWeakKeyDictionary([(key, 1)])
>>> d.get(key)
1
>>> d.get(missing)
>>> d.get(missing, 12)
12
"""
return self.data.get(WeakRef(key), default)
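# Containment goes through the same WeakRef wrapper; iteration yields
# the dereferenced keys, which can include None for referents that have
# gone away but have not yet been pruned by __setstate__.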
def __contains__(self, key):
return WeakRef(key) in self.data
def __iter__(self):
for k in self.data:
yield k()
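# update() merges either another PersistentWeakKeyDictionary (whose
# keys are already wrapped) or any mapping with an items() method,
# wrapping each plain key in a WeakRef.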
def update(self, adict):
if isinstance(adict, PersistentWeakKeyDictionary):
self.data.update(adict.data)
else:
for k, v in adict.items():
self.data[WeakRef(k)] = v
# TODO: May need more methods, and tests.
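# As an illustrative sketch only (not part of the class as written),
# further methods could follow the same lazy style, e.g.:
#
#     def items(self):
#         return [(k(), v) for k, v in self.data.items()]
#
#     def values(self):
#         return list(self.data.values())
#
# Like __iter__, such an items() could yield None keys for referents
# that have gone away and not yet been pruned.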