Skip to content
Projects
Groups
Snippets
Help
Loading...
Help
Support
Keyboard shortcuts
?
Submit feedback
Contribute to GitLab
Sign in / Register
Toggle navigation
P
persistent
Project overview
Project overview
Details
Activity
Releases
Repository
Repository
Files
Commits
Branches
Tags
Contributors
Graph
Compare
Issues
0
Issues
0
List
Boards
Labels
Milestones
Merge Requests
0
Merge Requests
0
Analytics
Analytics
Repository
Value Stream
Wiki
Wiki
Snippets
Snippets
Members
Members
Collapse sidebar
Close sidebar
Activity
Graph
Create a new issue
Commits
Issue Boards
Open sidebar
Kirill Smelkov
persistent
Commits
39c1f033
Commit
39c1f033
authored
May 19, 2015
by
Tres Seaver
Browse files
Options
Browse Files
Download
Plain Diff
Merge pull request #20 from NextThought/zodb-on-pypy-support
Support for ZODB on PyPy
parents
7f673b53
1310dce6
Changes
10
Show whitespace changes
Inline
Side-by-side
Showing
10 changed files
with
1386 additions
and
229 deletions
+1386
-229
.gitignore
.gitignore
+2
-0
CHANGES.rst
CHANGES.rst
+5
-0
persistent/persistence.py
persistent/persistence.py
+214
-99
persistent/picklecache.py
persistent/picklecache.py
+224
-82
persistent/ring.py
persistent/ring.py
+226
-0
persistent/tests/test_persistence.py
persistent/tests/test_persistence.py
+227
-6
persistent/tests/test_picklecache.py
persistent/tests/test_picklecache.py
+285
-28
persistent/tests/test_ring.py
persistent/tests/test_ring.py
+157
-0
persistent/tests/test_timestamp.py
persistent/tests/test_timestamp.py
+20
-4
tox.ini
tox.ini
+26
-10
No files found.
.gitignore
View file @
39c1f033
...
@@ -13,3 +13,5 @@ nosetests.xml
...
@@ -13,3 +13,5 @@ nosetests.xml
coverage.xml
coverage.xml
*.egg-info
*.egg-info
.installed.cfg
.installed.cfg
.dir-locals.el
dist
CHANGES.rst
View file @
39c1f033
...
@@ -4,6 +4,11 @@
...
@@ -4,6 +4,11 @@
4.0.10 (unreleased)
4.0.10 (unreleased)
-------------------
-------------------
- The Python implementation of ``Persistent`` and ``PickleCache`` now
behave more similarly to the C implementation. In particular, the
Python version can now run the complete ZODB and ZEO test suites.
- Fix the hashcode of the Python ``TimeStamp`` on 32-bit platforms.
- Fix the hashcode of the Python ``TimeStamp`` on 32-bit platforms.
4.0.9 (2015-04-08)
4.0.9 (2015-04-08)
...
...
persistent/persistence.py
View file @
39c1f033
...
@@ -27,6 +27,8 @@ from persistent.timestamp import _ZERO
...
@@ -27,6 +27,8 @@ from persistent.timestamp import _ZERO
from
persistent._compat
import
copy_reg
from
persistent._compat
import
copy_reg
from
persistent._compat
import
intern
from
persistent._compat
import
intern
from
.
import
ring
_INITIAL_SERIAL
=
_ZERO
_INITIAL_SERIAL
=
_ZERO
...
@@ -37,20 +39,24 @@ _STICKY = 0x0002
...
@@ -37,20 +39,24 @@ _STICKY = 0x0002
_OGA
=
object
.
__getattribute__
_OGA
=
object
.
__getattribute__
_OSA
=
object
.
__setattr__
_OSA
=
object
.
__setattr__
# These names can be used from a ghost without causing it to be activated.
# These names can be used from a ghost without causing it to be
# activated. These are standardized with the C implementation
SPECIAL_NAMES
=
(
'__class__'
,
SPECIAL_NAMES
=
(
'__class__'
,
'__del__'
,
'__del__'
,
'__dict__'
,
'__dict__'
,
'__of__'
,
'__of__'
,
'__setstate__'
'__setstate__'
,)
)
# And this is an implementation detail of this class; it holds
# the standard names plus the slot names, allowing for just one
# check in __getattribute__
_SPECIAL_NAMES
=
set
(
SPECIAL_NAMES
)
@
implementer
(
IPersistent
)
@
implementer
(
IPersistent
)
class
Persistent
(
object
):
class
Persistent
(
object
):
""" Pure Python implmentation of Persistent base class
""" Pure Python implmentation of Persistent base class
"""
"""
__slots__
=
(
'__jar'
,
'__oid'
,
'__serial'
,
'__flags'
,
'__size'
)
__slots__
=
(
'__jar'
,
'__oid'
,
'__serial'
,
'__flags'
,
'__size'
,
'__ring'
,
)
def
__new__
(
cls
,
*
args
,
**
kw
):
def
__new__
(
cls
,
*
args
,
**
kw
):
inst
=
super
(
Persistent
,
cls
).
__new__
(
cls
)
inst
=
super
(
Persistent
,
cls
).
__new__
(
cls
)
...
@@ -63,59 +69,69 @@ class Persistent(object):
...
@@ -63,59 +69,69 @@ class Persistent(object):
_OSA
(
inst
,
'_Persistent__serial'
,
None
)
_OSA
(
inst
,
'_Persistent__serial'
,
None
)
_OSA
(
inst
,
'_Persistent__flags'
,
None
)
_OSA
(
inst
,
'_Persistent__flags'
,
None
)
_OSA
(
inst
,
'_Persistent__size'
,
0
)
_OSA
(
inst
,
'_Persistent__size'
,
0
)
_OSA
(
inst
,
'_Persistent__ring'
,
None
)
return
inst
return
inst
# _p_jar: see IPersistent.
# _p_jar: see IPersistent.
def
_get_jar
(
self
):
def
_get_jar
(
self
):
return
self
.
__jar
return
_OGA
(
self
,
'_Persistent__jar'
)
def
_set_jar
(
self
,
value
):
def
_set_jar
(
self
,
value
):
if
self
.
__jar
is
not
None
:
jar
=
_OGA
(
self
,
'_Persistent__jar'
)
if
self
.
__jar
!=
value
:
if
self
.
_p_is_in_cache
(
jar
)
and
value
is
not
None
and
jar
!=
value
:
raise
ValueError
(
'Already assigned a data manager'
)
# The C implementation only forbids changing the jar
else
:
# if we're already in a cache. Match its error message
self
.
__jar
=
value
raise
ValueError
(
'can not change _p_jar of cached object'
)
self
.
__flags
=
0
if
_OGA
(
self
,
'_Persistent__jar'
)
!=
value
:
_OSA
(
self
,
'_Persistent__jar'
,
value
)
_OSA
(
self
,
'_Persistent__flags'
,
0
)
def
_del_jar
(
self
):
def
_del_jar
(
self
):
jar
=
self
.
__jar
jar
=
_OGA
(
self
,
'_Persistent__jar'
)
oid
=
self
.
__oid
if
jar
is
not
None
:
if
jar
is
not
None
:
if
oid
and
jar
.
_cache
.
get
(
oid
):
if
self
.
_p_is_in_cache
(
jar
):
raise
ValueError
(
"can't delete _p_jar of cached object"
)
raise
ValueError
(
"can't delete _p_jar of cached object"
)
self
.
__setattr__
(
'_Persistent__jar'
,
None
)
_OSA
(
self
,
'_Persistent__jar'
,
None
)
self
.
__flags
=
None
_OSA
(
self
,
'_Persistent__flags'
,
None
)
_p_jar
=
property
(
_get_jar
,
_set_jar
,
_del_jar
)
_p_jar
=
property
(
_get_jar
,
_set_jar
,
_del_jar
)
# _p_oid: see IPersistent.
# _p_oid: see IPersistent.
def
_get_oid
(
self
):
def
_get_oid
(
self
):
return
self
.
__oid
return
_OGA
(
self
,
'_Persistent__oid'
)
def
_set_oid
(
self
,
value
):
def
_set_oid
(
self
,
value
):
if
value
==
self
.
__oid
:
if
value
==
_OGA
(
self
,
'_Persistent__oid'
)
:
return
return
if
value
is
not
None
:
# The C implementation allows *any* value to be
if
not
isinstance
(
value
,
OID_TYPE
):
# used as the _p_oid.
raise
ValueError
(
'Invalid OID type: %s'
%
value
)
#if value is not None:
if
self
.
__jar
is
not
None
and
self
.
__oid
is
not
None
:
# if not isinstance(value, OID_TYPE):
raise
ValueError
(
'Already assigned an OID by our jar'
)
# raise ValueError('Invalid OID type: %s' % value)
self
.
__oid
=
value
# The C implementation only forbids changing the OID
# if we're in a cache, regardless of what the current
# value or jar is
if
self
.
_p_is_in_cache
():
# match the C error message
raise
ValueError
(
'can not change _p_oid of cached object'
)
_OSA
(
self
,
'_Persistent__oid'
,
value
)
def
_del_oid
(
self
):
def
_del_oid
(
self
):
jar
=
self
.
__jar
jar
=
_OGA
(
self
,
'_Persistent__jar'
)
oid
=
self
.
__oid
oid
=
_OGA
(
self
,
'_Persistent__oid'
)
if
jar
is
not
None
:
if
jar
is
not
None
:
if
oid
and
jar
.
_cache
.
get
(
oid
):
if
oid
and
jar
.
_cache
.
get
(
oid
):
raise
ValueError
(
'Cannot delete _p_oid of cached object'
)
raise
ValueError
(
'Cannot delete _p_oid of cached object'
)
self
.
__oid
=
None
_OSA
(
self
,
'_Persistent__oid'
,
None
)
_p_oid
=
property
(
_get_oid
,
_set_oid
,
_del_oid
)
_p_oid
=
property
(
_get_oid
,
_set_oid
,
_del_oid
)
# _p_serial: see IPersistent.
# _p_serial: see IPersistent.
def
_get_serial
(
self
):
def
_get_serial
(
self
):
if
self
.
__serial
is
not
None
:
serial
=
_OGA
(
self
,
'_Persistent__serial'
)
return
self
.
__serial
if
serial
is
not
None
:
return
serial
return
_INITIAL_SERIAL
return
_INITIAL_SERIAL
def
_set_serial
(
self
,
value
):
def
_set_serial
(
self
,
value
):
...
@@ -123,23 +139,24 @@ class Persistent(object):
...
@@ -123,23 +139,24 @@ class Persistent(object):
raise
ValueError
(
'Invalid SERIAL type: %s'
%
value
)
raise
ValueError
(
'Invalid SERIAL type: %s'
%
value
)
if
len
(
value
)
!=
8
:
if
len
(
value
)
!=
8
:
raise
ValueError
(
'SERIAL must be 8 octets'
)
raise
ValueError
(
'SERIAL must be 8 octets'
)
self
.
__serial
=
value
_OSA
(
self
,
'_Persistent__serial'
,
value
)
def
_del_serial
(
self
):
def
_del_serial
(
self
):
self
.
__serial
=
None
_OSA
(
self
,
'_Persistent__serial'
,
None
)
_p_serial
=
property
(
_get_serial
,
_set_serial
,
_del_serial
)
_p_serial
=
property
(
_get_serial
,
_set_serial
,
_del_serial
)
# _p_changed: see IPersistent.
# _p_changed: see IPersistent.
def
_get_changed
(
self
):
def
_get_changed
(
self
):
if
self
.
__jar
is
None
:
if
_OGA
(
self
,
'_Persistent__jar'
)
is
None
:
return
False
return
False
if
self
.
__flags
is
None
:
# ghost
flags
=
_OGA
(
self
,
'_Persistent__flags'
)
if
flags
is
None
:
# ghost
return
None
return
None
return
bool
(
self
.
__
flags
&
_CHANGED
)
return
bool
(
flags
&
_CHANGED
)
def
_set_changed
(
self
,
value
):
def
_set_changed
(
self
,
value
):
if
self
.
__flags
is
None
:
if
_OGA
(
self
,
'_Persistent__flags'
)
is
None
:
if
value
:
if
value
:
self
.
_p_activate
()
self
.
_p_activate
()
self
.
_p_set_changed_flag
(
value
)
self
.
_p_set_changed_flag
(
value
)
...
@@ -156,23 +173,31 @@ class Persistent(object):
...
@@ -156,23 +173,31 @@ class Persistent(object):
# _p_mtime
# _p_mtime
def
_get_mtime
(
self
):
def
_get_mtime
(
self
):
if
self
.
__serial
is
not
None
:
# The C implementation automatically unghostifies the object
ts
=
TimeStamp
(
self
.
__serial
)
# when _p_mtime is accessed.
self
.
_p_activate
()
self
.
_p_accessed
()
serial
=
_OGA
(
self
,
'_Persistent__serial'
)
if
serial
is
not
None
:
ts
=
TimeStamp
(
serial
)
return
ts
.
timeTime
()
return
ts
.
timeTime
()
_p_mtime
=
property
(
_get_mtime
)
_p_mtime
=
property
(
_get_mtime
)
# _p_state
# _p_state
def
_get_state
(
self
):
def
_get_state
(
self
):
if
self
.
__jar
is
None
:
# Note the use of OGA and caching to avoid recursive calls to __getattribute__:
# __getattribute__ calls _p_accessed calls cache.mru() calls _p_state
if
_OGA
(
self
,
'_Persistent__jar'
)
is
None
:
return
UPTODATE
return
UPTODATE
if
self
.
__flags
is
None
:
flags
=
_OGA
(
self
,
'_Persistent__flags'
)
if
flags
is
None
:
return
GHOST
return
GHOST
if
self
.
__
flags
&
_CHANGED
:
if
flags
&
_CHANGED
:
result
=
CHANGED
result
=
CHANGED
else
:
else
:
result
=
UPTODATE
result
=
UPTODATE
if
self
.
__
flags
&
_STICKY
:
if
flags
&
_STICKY
:
return
STICKY
return
STICKY
return
result
return
result
...
@@ -180,18 +205,18 @@ class Persistent(object):
...
@@ -180,18 +205,18 @@ class Persistent(object):
# _p_estimated_size: XXX don't want to reserve the space?
# _p_estimated_size: XXX don't want to reserve the space?
def
_get_estimated_size
(
self
):
def
_get_estimated_size
(
self
):
return
self
.
__size
*
64
return
_OGA
(
self
,
'_Persistent__size'
)
*
64
def
_set_estimated_size
(
self
,
value
):
def
_set_estimated_size
(
self
,
value
):
if
isinstance
(
value
,
int
):
if
isinstance
(
value
,
int
):
if
value
<
0
:
if
value
<
0
:
raise
ValueError
(
'_p_estimated_size must not be negative'
)
raise
ValueError
(
'_p_estimated_size must not be negative'
)
self
.
__size
=
_estimated_size_in_24_bits
(
value
)
_OSA
(
self
,
'_Persistent__size'
,
_estimated_size_in_24_bits
(
value
)
)
else
:
else
:
raise
TypeError
(
"_p_estimated_size must be an integer"
)
raise
TypeError
(
"_p_estimated_size must be an integer"
)
def
_del_estimated_size
(
self
):
def
_del_estimated_size
(
self
):
self
.
__size
=
0
_OSA
(
self
,
'_Persistent__size'
,
0
)
_p_estimated_size
=
property
(
_p_estimated_size
=
property
(
_get_estimated_size
,
_set_estimated_size
,
_del_estimated_size
)
_get_estimated_size
,
_set_estimated_size
,
_del_estimated_size
)
...
@@ -199,28 +224,32 @@ class Persistent(object):
...
@@ -199,28 +224,32 @@ class Persistent(object):
# The '_p_sticky' property is not (yet) part of the API: for now,
# The '_p_sticky' property is not (yet) part of the API: for now,
# it exists to simplify debugging and testing assertions.
# it exists to simplify debugging and testing assertions.
def
_get_sticky
(
self
):
def
_get_sticky
(
self
):
if
self
.
__flags
is
None
:
flags
=
_OGA
(
self
,
'_Persistent__flags'
)
if
flags
is
None
:
return
False
return
False
return
bool
(
self
.
__
flags
&
_STICKY
)
return
bool
(
flags
&
_STICKY
)
def
_set_sticky
(
self
,
value
):
def
_set_sticky
(
self
,
value
):
if
self
.
__flags
is
None
:
flags
=
_OGA
(
self
,
'_Persistent__flags'
)
if
flags
is
None
:
raise
ValueError
(
'Ghost'
)
raise
ValueError
(
'Ghost'
)
if
value
:
if
value
:
self
.
__
flags
|=
_STICKY
flags
|=
_STICKY
else
:
else
:
self
.
__flags
&=
~
_STICKY
flags
&=
~
_STICKY
_OSA
(
self
,
'_Persistent__flags'
,
flags
)
_p_sticky
=
property
(
_get_sticky
,
_set_sticky
)
_p_sticky
=
property
(
_get_sticky
,
_set_sticky
)
# The '_p_status' property is not (yet) part of the API: for now,
# The '_p_status' property is not (yet) part of the API: for now,
# it exists to simplify debugging and testing assertions.
# it exists to simplify debugging and testing assertions.
def
_get_status
(
self
):
def
_get_status
(
self
):
if
self
.
__jar
is
None
:
if
_OGA
(
self
,
'_Persistent__jar'
)
is
None
:
return
'unsaved'
return
'unsaved'
if
self
.
__flags
is
None
:
flags
=
_OGA
(
self
,
'_Persistent__flags'
)
if
flags
is
None
:
return
'ghost'
return
'ghost'
if
self
.
__
flags
&
_STICKY
:
if
flags
&
_STICKY
:
return
'sticky'
return
'sticky'
if
self
.
__
flags
&
_CHANGED
:
if
flags
&
_CHANGED
:
return
'changed'
return
'changed'
return
'saved'
return
'saved'
...
@@ -230,16 +259,16 @@ class Persistent(object):
...
@@ -230,16 +259,16 @@ class Persistent(object):
def
__getattribute__
(
self
,
name
):
def
__getattribute__
(
self
,
name
):
""" See IPersistent.
""" See IPersistent.
"""
"""
if
(
not
name
.
startswith
(
'_Persistent__'
)
and
oga
=
_OGA
not
name
.
startswith
(
'_p_'
)
and
if
(
not
name
.
startswith
(
'_p_'
)
and
name
not
in
SPECIAL_NAMES
):
name
not
in
_
SPECIAL_NAMES
):
if
_OGA
(
self
,
'_Persistent__flags'
)
is
None
:
if
oga
(
self
,
'_Persistent__flags'
)
is
None
:
_OGA
(
self
,
'_p_activate'
)()
oga
(
self
,
'_p_activate'
)()
_OGA
(
self
,
'_p_accessed'
)()
oga
(
self
,
'_p_accessed'
)()
return
_OGA
(
self
,
name
)
return
oga
(
self
,
name
)
def
__setattr__
(
self
,
name
,
value
):
def
__setattr__
(
self
,
name
,
value
):
special_name
=
(
name
.
startswith
(
'_Persistent__'
)
or
special_name
=
(
name
in
_SPECIAL_NAMES
or
name
.
startswith
(
'_p_'
))
name
.
startswith
(
'_p_'
))
volatile
=
name
.
startswith
(
'_v_'
)
volatile
=
name
.
startswith
(
'_v_'
)
if
not
special_name
:
if
not
special_name
:
...
@@ -259,7 +288,7 @@ class Persistent(object):
...
@@ -259,7 +288,7 @@ class Persistent(object):
_OGA
(
self
,
'_p_register'
)()
_OGA
(
self
,
'_p_register'
)()
def
__delattr__
(
self
,
name
):
def
__delattr__
(
self
,
name
):
special_name
=
(
name
.
startswith
(
'_Persistent__'
)
or
special_name
=
(
name
in
_SPECIAL_NAMES
or
name
.
startswith
(
'_p_'
))
name
.
startswith
(
'_p_'
))
if
not
special_name
:
if
not
special_name
:
if
_OGA
(
self
,
'_Persistent__flags'
)
is
None
:
if
_OGA
(
self
,
'_Persistent__flags'
)
is
None
:
...
@@ -315,7 +344,9 @@ class Persistent(object):
...
@@ -315,7 +344,9 @@ class Persistent(object):
raise
TypeError
(
'No instance dict'
)
raise
TypeError
(
'No instance dict'
)
idict
.
clear
()
idict
.
clear
()
for
k
,
v
in
inst_dict
.
items
():
for
k
,
v
in
inst_dict
.
items
():
idict
[
intern
(
k
)]
=
v
# Normally the keys for instance attributes are interned.
# Do that here, but only if it is possible to do so.
idict
[
intern
(
k
)
if
type
(
k
)
is
str
else
k
]
=
v
slotnames
=
self
.
_slotnames
()
slotnames
=
self
.
_slotnames
()
if
slotnames
:
if
slotnames
:
for
k
,
v
in
slots
.
items
():
for
k
,
v
in
slots
.
items
():
...
@@ -331,36 +362,85 @@ class Persistent(object):
...
@@ -331,36 +362,85 @@ class Persistent(object):
def
_p_activate
(
self
):
def
_p_activate
(
self
):
""" See IPersistent.
""" See IPersistent.
"""
"""
before
=
self
.
__flags
oga
=
_OGA
if
self
.
__flags
is
None
or
self
.
_p_state
<
0
:
# Only do this if we're a ghost
before
=
oga
(
self
,
'_Persistent__flags'
)
self
.
__flags
=
0
if
before
is
None
:
# Only do this if we're a ghost
if
self
.
__jar
is
not
None
and
self
.
__oid
is
not
None
:
# Begin by marking up-to-date in case we bail early
_OSA
(
self
,
'_Persistent__flags'
,
0
)
jar
=
oga
(
self
,
'_Persistent__jar'
)
if
jar
is
None
:
return
oid
=
oga
(
self
,
'_Persistent__oid'
)
if
oid
is
None
:
return
# If we're actually going to execute a set-state,
# mark as changed to prevent any recursive call
# (actually, our earlier check that we're a ghost should
# prevent this, but the C implementation sets it to changed
# while calling jar.setstate, and this is observable to clients).
# The main point of this is to prevent changes made during
# setstate from registering the object with the jar.
_OSA
(
self
,
'_Persistent__flags'
,
CHANGED
)
try
:
try
:
self
.
__
jar
.
setstate
(
self
)
jar
.
setstate
(
self
)
except
:
except
:
self
.
__flags
=
before
_OSA
(
self
,
'_Persistent__flags'
,
before
)
raise
raise
else
:
# If we succeed, no matter what the implementation
# of setstate did, mark ourself as up-to-date. The
# C implementation unconditionally does this.
_OSA
(
self
,
'_Persistent__flags'
,
0
)
# up-to-date
# In the C implementation, _p_invalidate winds up calling
# _p_deactivate. There are ZODB tests that depend on this;
# it's not documented but there may be code in the wild
# that does as well
def
_p_deactivate
(
self
):
def
_p_deactivate
(
self
):
""" See IPersistent.
""" See IPersistent.
"""
"""
if
self
.
__flags
is
not
None
and
not
self
.
__flags
:
flags
=
_OGA
(
self
,
'_Persistent__flags'
)
self
.
_p_invalidate
()
if
flags
is
not
None
and
not
flags
:
self
.
_p_invalidate_deactivate_helper
()
def
_p_invalidate
(
self
):
def
_p_invalidate
(
self
):
""" See IPersistent.
""" See IPersistent.
"""
"""
if
self
.
__jar
is
not
None
:
# If we think we have changes, we must pretend
if
self
.
__flags
is
not
None
:
# like we don't so that deactivate does its job
self
.
__flags
=
None
_OSA
(
self
,
'_Persistent__flags'
,
0
)
self
.
_p_deactivate
()
def
_p_invalidate_deactivate_helper
(
self
):
jar
=
_OGA
(
self
,
'_Persistent__jar'
)
if
jar
is
None
:
return
if
_OGA
(
self
,
'_Persistent__flags'
)
is
not
None
:
_OSA
(
self
,
'_Persistent__flags'
,
None
)
idict
=
getattr
(
self
,
'__dict__'
,
None
)
idict
=
getattr
(
self
,
'__dict__'
,
None
)
if
idict
is
not
None
:
if
idict
is
not
None
:
idict
.
clear
()
idict
.
clear
()
# Implementation detail: deactivating/invalidating
# updates the size of the cache (if we have one)
# by telling it this object no longer takes any bytes
# (-1 is a magic number to compensate for the implementation,
# which always adds one to the size given)
try
:
cache
=
jar
.
_cache
except
AttributeError
:
pass
else
:
cache
.
update_object_size_estimation
(
_OGA
(
self
,
'_Persistent__oid'
),
-
1
)
# See notes in PickleCache.sweep for why we have to do this
cache
.
_persistent_deactivate_ran
=
True
def
_p_getattr
(
self
,
name
):
def
_p_getattr
(
self
,
name
):
""" See IPersistent.
""" See IPersistent.
"""
"""
if
name
.
startswith
(
'_p_'
)
or
name
in
SPECIAL_NAMES
:
if
name
.
startswith
(
'_p_'
)
or
name
in
_
SPECIAL_NAMES
:
return
True
return
True
self
.
_p_activate
()
self
.
_p_activate
()
self
.
_p_accessed
()
self
.
_p_accessed
()
...
@@ -389,18 +469,22 @@ class Persistent(object):
...
@@ -389,18 +469,22 @@ class Persistent(object):
# Helper methods: not APIs: we name them with '_p_' to bypass
# Helper methods: not APIs: we name them with '_p_' to bypass
# the __getattribute__ bit which bumps the cache.
# the __getattribute__ bit which bumps the cache.
def
_p_register
(
self
):
def
_p_register
(
self
):
if
self
.
__jar
is
not
None
and
self
.
__oid
is
not
None
:
jar
=
_OGA
(
self
,
'_Persistent__jar'
)
self
.
__jar
.
register
(
self
)
if
jar
is
not
None
and
_OGA
(
self
,
'_Persistent__oid'
)
is
not
None
:
jar
.
register
(
self
)
def
_p_set_changed_flag
(
self
,
value
):
def
_p_set_changed_flag
(
self
,
value
):
if
value
:
if
value
:
before
=
self
.
__flags
before
=
_OGA
(
self
,
'_Persistent__flags'
)
after
=
self
.
__flags
|
_CHANGED
after
=
before
|
_CHANGED
if
before
!=
after
:
if
before
!=
after
:
self
.
_p_register
()
self
.
_p_register
()
self
.
__flags
=
after
_OSA
(
self
,
'_Persistent__flags'
,
after
)
else
:
else
:
self
.
__flags
&=
~
_CHANGED
flags
=
_OGA
(
self
,
'_Persistent__flags'
)
flags
&=
~
_CHANGED
_OSA
(
self
,
'_Persistent__flags'
,
flags
)
def
_p_accessed
(
self
):
def
_p_accessed
(
self
):
# Notify the jar's pickle cache that we have been accessed.
# Notify the jar's pickle cache that we have been accessed.
...
@@ -408,21 +492,52 @@ class Persistent(object):
...
@@ -408,21 +492,52 @@ class Persistent(object):
# detail, the '_cache' attribute of the jar. We made it a
# detail, the '_cache' attribute of the jar. We made it a
# private API to avoid the cycle of keeping a reference to
# private API to avoid the cycle of keeping a reference to
# the cache on the persistent object.
# the cache on the persistent object.
if
(
self
.
__jar
is
not
None
and
self
.
__oid
is
not
None
and
# The below is the equivalent of this, but avoids
self
.
_p_state
>=
0
):
# several recursive through __getattribute__, especially for _p_state,
# This scenario arises in ZODB: ZODB.serialize.ObjectWriter
# and benchmarks much faster
#
# if(self.__jar is None or
# self.__oid is None or
# self._p_state < 0 ): return
oga
=
_OGA
jar
=
oga
(
self
,
'_Persistent__jar'
)
if
jar
is
None
:
return
oid
=
oga
(
self
,
'_Persistent__oid'
)
if
oid
is
None
:
return
flags
=
oga
(
self
,
'_Persistent__flags'
)
if
flags
is
None
:
# ghost
return
# The KeyError arises in ZODB: ZODB.serialize.ObjectWriter
# can assign a jar and an oid to newly seen persistent objects,
# can assign a jar and an oid to newly seen persistent objects,
# but because they are newly created, they aren't in the
# but because they are newly created, they aren't in the
# pickle cache yet. There doesn't seem to be a way to distinguish
# pickle cache yet. There doesn't seem to be a way to distinguish
# that at this level, all we can do is catch it
# that at this level, all we can do is catch it.
# The AttributeError arises in ZODB test cases
try
:
try
:
self
.
__jar
.
_cache
.
mru
(
self
.
__
oid
)
jar
.
_cache
.
mru
(
oid
)
except
KeyError
:
except
(
AttributeError
,
KeyError
)
:
pass
pass
def
_p_is_in_cache
(
self
,
jar
=
None
):
oid
=
_OGA
(
self
,
'_Persistent__oid'
)
if
not
oid
:
return
False
jar
=
jar
or
_OGA
(
self
,
'_Persistent__jar'
)
cache
=
getattr
(
jar
,
'_cache'
,
None
)
if
cache
is
not
None
:
return
cache
.
get
(
oid
)
is
self
def
_estimated_size_in_24_bits
(
value
):
def
_estimated_size_in_24_bits
(
value
):
if
value
>
1073741696
:
if
value
>
1073741696
:
return
16777215
return
16777215
return
(
value
//
64
)
+
1
return
(
value
//
64
)
+
1
_SPECIAL_NAMES
.
update
([
intern
(
'_Persistent'
+
x
)
for
x
in
Persistent
.
__slots__
])
persistent/picklecache.py
View file @
39c1f033
...
@@ -13,36 +13,95 @@
...
@@ -13,36 +13,95 @@
##############################################################################
##############################################################################
import
gc
import
gc
import
weakref
import
weakref
import
sys
from
zope.interface
import
implementer
from
zope.interface
import
implementer
from
persistent.interfaces
import
CHANGED
from
persistent.interfaces
import
GHOST
from
persistent.interfaces
import
GHOST
from
persistent.interfaces
import
IPickleCache
from
persistent.interfaces
import
IPickleCache
from
persistent.interfaces
import
STICKY
from
persistent.interfaces
import
OID_TYPE
from
persistent.interfaces
import
OID_TYPE
from
persistent.interfaces
import
UPTODATE
from
persistent
import
Persistent
from
persistent.persistence
import
_estimated_size_in_24_bits
class
RingNode
(
object
):
# Tests may modify this to add additional types
# 32 byte fixed size wrapper.
_CACHEABLE_TYPES
=
(
type
,
Persistent
)
__slots__
=
(
'object'
,
'next'
,
'prev'
)
_SWEEPABLE_TYPES
=
(
Persistent
,)
def
__init__
(
self
,
object
,
next
=
None
,
prev
=
None
):
self
.
object
=
object
# The Python PickleCache implementation keeps track of the objects it
self
.
next
=
next
# is caching in a WeakValueDictionary. The number of objects in the
self
.
prev
=
prev
# cache (in this dictionary) is exposed as the len of the cache. Under
# non-refcounted implementations like PyPy, the weak references in
# this dictionary are only cleared when the garbage collector runs.
# Thus, after an incrgc, the len of the cache is incorrect for some
# period of time unless we ask the GC to run.
# Furthermore, evicted objects can stay in the dictionary and be returned
# from __getitem__ or possibly conflict with a new item in __setitem__.
# We determine whether or not we need to do the GC based on the ability
# to get a reference count: PyPy and Jython don't use refcounts and don't
# expose this; this is safer than blacklisting specific platforms (e.g.,
# what about IronPython?). On refcounted platforms, we don't want to
# run a GC to avoid possible performance regressions (e.g., it may
# block all threads).
# Tests may modify this
_SWEEP_NEEDS_GC
=
not
hasattr
(
sys
,
'getrefcount'
)
# On Jython, we need to explicitly ask it to monitor
# objects if we want a more deterministic GC
if
hasattr
(
gc
,
'monitorObject'
):
# pragma: no cover
_gc_monitor
=
gc
.
monitorObject
else
:
def
_gc_monitor
(
o
):
pass
_OGA
=
object
.
__getattribute__
def
_sweeping_ring
(
f
):
# A decorator for functions in the PickleCache
# that are sweeping the entire ring (mutating it);
# serves as a pseudo-lock to not mutate the ring further
# in other functions
def
locked
(
self
,
*
args
,
**
kwargs
):
self
.
_is_sweeping_ring
=
True
try
:
return
f
(
self
,
*
args
,
**
kwargs
)
finally
:
self
.
_is_sweeping_ring
=
False
return
locked
from
.ring
import
Ring
@
implementer
(
IPickleCache
)
@
implementer
(
IPickleCache
)
class
PickleCache
(
object
):
class
PickleCache
(
object
):
total_estimated_size
=
0
cache_size_bytes
=
0
# Set by functions that sweep the entire ring (via _sweeping_ring)
# Serves as a pseudo-lock
_is_sweeping_ring
=
False
def
__init__
(
self
,
jar
,
target_size
=
0
,
cache_size_bytes
=
0
):
def
__init__
(
self
,
jar
,
target_size
=
0
,
cache_size_bytes
=
0
):
# TODO: forward-port Dieter's bytes stuff
# TODO: forward-port Dieter's bytes stuff
self
.
jar
=
jar
self
.
jar
=
jar
self
.
target_size
=
target_size
# We expect the jars to be able to have a pointer to
# us; this is a reference cycle, but certain
# aspects of invalidation and accessing depend on it.
# The actual Connection objects we're used with do set this
# automatically, but many test objects don't.
# TODO: track this on the persistent objects themself?
try
:
jar
.
_cache
=
self
except
AttributeError
:
# Some ZODB tests pass in an object that cannot have an _cache
pass
self
.
cache_size
=
target_size
self
.
drain_resistance
=
0
self
.
drain_resistance
=
0
self
.
non_ghost_count
=
0
self
.
non_ghost_count
=
0
self
.
persistent_classes
=
{}
self
.
persistent_classes
=
{}
self
.
data
=
weakref
.
WeakValueDictionary
()
self
.
data
=
weakref
.
WeakValueDictionary
()
self
.
ring
=
Ring
Node
(
None
)
self
.
ring
=
Ring
(
)
self
.
ring
.
next
=
self
.
ring
.
prev
=
self
.
ring
self
.
cache_size_bytes
=
cache_size_bytes
# IPickleCache API
# IPickleCache API
def
__len__
(
self
):
def
__len__
(
self
):
...
@@ -62,42 +121,64 @@ class PickleCache(object):
...
@@ -62,42 +121,64 @@ class PickleCache(object):
def
__setitem__
(
self
,
oid
,
value
):
def
__setitem__
(
self
,
oid
,
value
):
""" See IPickleCache.
""" See IPickleCache.
"""
"""
if
not
isinstance
(
oid
,
OID_TYPE
):
# XXX bytes
# The order of checks matters for C compatibility;
raise
ValueError
(
'OID must be %s: %s'
%
(
OID_TYPE
,
oid
))
# the ZODB tests depend on this
# The C impl requires either a type or a Persistent subclass
if
not
isinstance
(
value
,
_CACHEABLE_TYPES
):
raise
TypeError
(
"Cache values must be persistent objects."
)
value_oid
=
value
.
_p_oid
if
not
isinstance
(
oid
,
OID_TYPE
)
or
not
isinstance
(
value_oid
,
OID_TYPE
):
raise
TypeError
(
'OID must be %s: key=%s _p_oid=%s'
%
(
OID_TYPE
,
oid
,
value_oid
))
if
value_oid
!=
oid
:
raise
ValueError
(
"Cache key does not match oid"
)
# XXX
# XXX
if
oid
in
self
.
persistent_classes
or
oid
in
self
.
data
:
if
oid
in
self
.
persistent_classes
or
oid
in
self
.
data
:
if
self
.
data
[
oid
]
is
not
value
:
# Have to be careful here, a GC might have just run
raise
KeyError
(
'Duplicate OID: %s'
%
oid
)
# and cleaned up the object
if
type
(
value
)
is
type
:
existing_data
=
self
.
get
(
oid
)
if
existing_data
is
not
None
and
existing_data
is
not
value
:
# Raise the same type of exception as the C impl with the same
# message.
raise
ValueError
(
'A different object already has the same oid'
)
# Match the C impl: it requires a jar
jar
=
getattr
(
value
,
'_p_jar'
,
None
)
if
jar
is
None
and
not
isinstance
(
value
,
type
):
raise
ValueError
(
"Cached object jar missing"
)
# It also requires that it cannot be cached more than one place
existing_cache
=
getattr
(
jar
,
'_cache'
,
None
)
if
(
existing_cache
is
not
None
and
existing_cache
is
not
self
and
existing_cache
.
data
.
get
(
oid
)
is
not
None
):
raise
ValueError
(
"Object already in another cache"
)
if
isinstance
(
value
,
type
):
# ZODB.persistentclass.PersistentMetaClass
self
.
persistent_classes
[
oid
]
=
value
self
.
persistent_classes
[
oid
]
=
value
else
:
else
:
self
.
data
[
oid
]
=
value
self
.
data
[
oid
]
=
value
if
value
.
_p_state
!=
GHOST
:
_gc_monitor
(
value
)
if
_OGA
(
value
,
'_p_state'
)
!=
GHOST
and
value
not
in
self
.
ring
:
self
.
ring
.
add
(
value
)
self
.
non_ghost_count
+=
1
self
.
non_ghost_count
+=
1
mru
=
self
.
ring
.
prev
self
.
ring
.
prev
=
node
=
RingNode
(
value
,
self
.
ring
,
mru
)
mru
.
next
=
node
def
__delitem__
(
self
,
oid
):
def
__delitem__
(
self
,
oid
):
""" See IPickleCache.
""" See IPickleCache.
"""
"""
if
not
isinstance
(
oid
,
OID_TYPE
):
if
not
isinstance
(
oid
,
OID_TYPE
):
raise
Valu
eError
(
'OID must be %s: %s'
%
(
OID_TYPE
,
oid
))
raise
Typ
eError
(
'OID must be %s: %s'
%
(
OID_TYPE
,
oid
))
if
oid
in
self
.
persistent_classes
:
if
oid
in
self
.
persistent_classes
:
del
self
.
persistent_classes
[
oid
]
del
self
.
persistent_classes
[
oid
]
else
:
else
:
value
=
self
.
data
.
pop
(
oid
)
value
=
self
.
data
.
pop
(
oid
)
node
=
self
.
ring
.
next
self
.
ring
.
delete
(
value
)
while
node
is
not
self
.
ring
:
if
node
.
object
is
value
:
node
.
prev
.
next
,
node
.
next
.
prev
=
node
.
next
,
node
.
prev
self
.
non_ghost_count
-=
1
break
node
=
node
.
next
def
get
(
self
,
oid
,
default
=
None
):
def
get
(
self
,
oid
,
default
=
None
):
""" See IPickleCache.
""" See IPickleCache.
"""
"""
value
=
self
.
data
.
get
(
oid
,
self
)
value
=
self
.
data
.
get
(
oid
,
self
)
if
value
is
not
self
:
if
value
is
not
self
:
return
value
return
value
...
@@ -106,32 +187,26 @@ class PickleCache(object):
...
@@ -106,32 +187,26 @@ class PickleCache(object):
def
mru
(
self
,
oid
):
def
mru
(
self
,
oid
):
""" See IPickleCache.
""" See IPickleCache.
"""
"""
node
=
self
.
ring
.
next
if
self
.
_is_sweeping_ring
:
while
node
is
not
self
.
ring
and
node
.
object
.
_p_oid
!=
oid
:
# accessess during sweeping, such as with an
node
=
node
.
next
# overridden _p_deactivate, don't mutate the ring
if
node
is
self
.
ring
:
# because that could leave it inconsistent
return
False
# marker return for tests
value
=
self
.
data
[
oid
]
value
=
self
.
data
[
oid
]
if
value
.
_p_state
!=
GHOST
:
was_in_ring
=
value
in
self
.
ring
if
not
was_in_ring
:
if
_OGA
(
value
,
'_p_state'
)
!=
GHOST
:
self
.
ring
.
add
(
value
)
self
.
non_ghost_count
+=
1
self
.
non_ghost_count
+=
1
mru
=
self
.
ring
.
prev
self
.
ring
.
prev
=
node
=
RingNode
(
value
,
self
.
ring
,
mru
)
mru
.
next
=
node
else
:
else
:
# remove from old location
self
.
ring
.
move_to_head
(
value
)
node
.
prev
.
next
,
node
.
next
.
prev
=
node
.
next
,
node
.
prev
# splice into new
self
.
ring
.
prev
.
next
,
node
.
prev
=
node
,
self
.
ring
.
prev
self
.
ring
.
prev
,
node
.
next
=
node
,
self
.
ring
def
ringlen
(
self
):
def
ringlen
(
self
):
""" See IPickleCache.
""" See IPickleCache.
"""
"""
result
=
0
return
len
(
self
.
ring
)
node
=
self
.
ring
.
next
while
node
is
not
self
.
ring
:
result
+=
1
node
=
node
.
next
return
result
def
items
(
self
):
def
items
(
self
):
""" See IPickleCache.
""" See IPickleCache.
...
@@ -142,10 +217,8 @@ class PickleCache(object):
...
@@ -142,10 +217,8 @@ class PickleCache(object):
""" See IPickleCache.
""" See IPickleCache.
"""
"""
result
=
[]
result
=
[]
node
=
self
.
ring
.
next
for
obj
in
self
.
ring
:
while
node
is
not
self
.
ring
:
result
.
append
((
obj
.
_p_oid
,
obj
))
result
.
append
((
node
.
object
.
_p_oid
,
node
.
object
))
node
=
node
.
next
return
result
return
result
def
klass_items
(
self
):
def
klass_items
(
self
):
...
@@ -156,18 +229,20 @@ class PickleCache(object):
...
@@ -156,18 +229,20 @@ class PickleCache(object):
def
incrgc
(
self
,
ignored
=
None
):
def
incrgc
(
self
,
ignored
=
None
):
""" See IPickleCache.
""" See IPickleCache.
"""
"""
target
=
self
.
target
_size
target
=
self
.
cache
_size
if
self
.
drain_resistance
>=
1
:
if
self
.
drain_resistance
>=
1
:
size
=
self
.
non_ghost_count
size
=
self
.
non_ghost_count
target2
=
size
-
1
-
(
size
/
self
.
drain_resistance
)
target2
=
size
-
1
-
(
size
/
/
self
.
drain_resistance
)
if
target2
<
target
:
if
target2
<
target
:
target
=
target2
target
=
target2
self
.
_sweep
(
target
)
# return value for testing
return
self
.
_sweep
(
target
,
self
.
cache_size_bytes
)
def
full_sweep
(
self
,
target
=
None
):
def
full_sweep
(
self
,
target
=
None
):
""" See IPickleCache.
""" See IPickleCache.
"""
"""
self
.
_sweep
(
0
)
# return value for testing
return
self
.
_sweep
(
0
)
minimize
=
full_sweep
minimize
=
full_sweep
...
@@ -182,9 +257,14 @@ class PickleCache(object):
...
@@ -182,9 +257,14 @@ class PickleCache(object):
raise
KeyError
(
'Duplicate OID: %s'
%
oid
)
raise
KeyError
(
'Duplicate OID: %s'
%
oid
)
obj
.
_p_oid
=
oid
obj
.
_p_oid
=
oid
obj
.
_p_jar
=
self
.
jar
obj
.
_p_jar
=
self
.
jar
if
type
(
obj
)
is
not
type
:
if
not
isinstance
(
obj
,
type
)
:
if
obj
.
_p_state
!=
GHOST
:
if
obj
.
_p_state
!=
GHOST
:
obj
.
_p_invalidate
()
# The C implementation sets this stuff directly,
# but we delegate to the class. However, we must be
# careful to avoid broken _p_invalidate and _p_deactivate
# that don't call the super class. See ZODB's
# testConnection.doctest_proper_ghost_initialization_with_empty__p_deactivate
obj
.
_p_invalidate_deactivate_helper
()
self
[
oid
]
=
obj
self
[
oid
]
=
obj
def
reify
(
self
,
to_reify
):
def
reify
(
self
,
to_reify
):
...
@@ -197,9 +277,7 @@ class PickleCache(object):
...
@@ -197,9 +277,7 @@ class PickleCache(object):
if
value
.
_p_state
==
GHOST
:
if
value
.
_p_state
==
GHOST
:
value
.
_p_activate
()
value
.
_p_activate
()
self
.
non_ghost_count
+=
1
self
.
non_ghost_count
+=
1
mru
=
self
.
ring
.
prev
self
.
mru
(
oid
)
self
.
ring
.
prev
=
node
=
RingNode
(
value
,
self
.
ring
,
mru
)
mru
.
next
=
node
def
invalidate
(
self
,
to_invalidate
):
def
invalidate
(
self
,
to_invalidate
):
""" See IPickleCache.
""" See IPickleCache.
...
@@ -229,36 +307,100 @@ class PickleCache(object):
...
@@ -229,36 +307,100 @@ class PickleCache(object):
def
update_object_size_estimation
(
self
,
oid
,
new_size
):
def
update_object_size_estimation
(
self
,
oid
,
new_size
):
""" See IPickleCache.
""" See IPickleCache.
"""
"""
pass
#pragma NO COVER
value
=
self
.
data
.
get
(
oid
)
if
value
is
not
None
:
# Recall that while the argument is given in bytes,
# we have to work with 64-block chunks (plus one)
# to match the C implementation. Hence the convoluted
# arithmetic
new_size_in_24
=
_estimated_size_in_24_bits
(
new_size
)
p_est_size_in_24
=
value
.
_Persistent__size
new_est_size_in_bytes
=
(
new_size_in_24
-
p_est_size_in_24
)
*
64
self
.
total_estimated_size
+=
new_est_size_in_bytes
cache_size
=
property
(
lambda
self
:
self
.
target_size
)
cache_drain_resistance
=
property
(
lambda
self
:
self
.
drain_resistance
)
cache_drain_resistance
=
property
(
lambda
self
:
self
.
drain_resistance
)
cache_non_ghost_count
=
property
(
lambda
self
:
self
.
non_ghost_count
)
cache_non_ghost_count
=
property
(
lambda
self
:
self
.
non_ghost_count
)
cache_data
=
property
(
lambda
self
:
dict
(
self
.
data
.
items
()))
cache_data
=
property
(
lambda
self
:
dict
(
self
.
data
.
items
()))
cache_klass_count
=
property
(
lambda
self
:
len
(
self
.
persistent_classes
))
cache_klass_count
=
property
(
lambda
self
:
len
(
self
.
persistent_classes
))
# Helpers
# Helpers
def
_sweep
(
self
,
target
):
# lock
# Set to true when a deactivation happens in our code. For
node
=
self
.
ring
.
next
# compatibility with the C implementation, we can only remove the
while
node
is
not
self
.
ring
and
self
.
non_ghost_count
>
target
:
# node and decrement our non-ghost count if our implementation
if
node
.
object
.
_p_state
not
in
(
STICKY
,
CHANGED
):
# actually runs (broken subclasses can forget to call super; ZODB
node
.
prev
.
next
,
node
.
next
.
prev
=
node
.
next
,
node
.
prev
# has tests for this). This gets set to false everytime we examine
node
.
object
=
None
# a node and checked afterwards. The C implementation has a very
# incestuous relationship between cPickleCache and cPersistence:
# the pickle cache calls _p_deactivate, which is responsible for
# both decrementing the non-ghost count and removing its node from
# the cache ring (and, if it gets deallocated, from the pickle
# cache's dictionary). We're trying to keep that to a minimum, but
# there's no way around it if we want full compatibility.
_persistent_deactivate_ran
=
False
@
_sweeping_ring
def
_sweep
(
self
,
target
,
target_size_bytes
=
0
):
# To avoid mutating datastructures in place or making a copy,
# and to work efficiently with both the CFFI ring and the
# deque-based ring, we collect the objects and their indexes
# up front and then hand them off for ejection.
# We don't use enumerate because that's slow under PyPy
i
=
-
1
to_eject
=
[]
for
value
in
self
.
ring
:
if
self
.
non_ghost_count
<=
target
and
(
self
.
total_estimated_size
<=
target_size_bytes
or
not
target_size_bytes
):
break
i
+=
1
if
value
.
_p_state
==
UPTODATE
:
# The C implementation will only evict things that are specifically
# in the up-to-date state
self
.
_persistent_deactivate_ran
=
False
# sweeping an object out of the cache should also
# ghost it---that's what C does. This winds up
# calling `update_object_size_estimation`.
# Also in C, if this was the last reference to the object,
# it removes itself from the `data` dictionary.
# If we're under PyPy or Jython, we need to run a GC collection
# to make this happen...this is only noticeable though, when
# we eject objects. Also, note that we can only take any of these
# actions if our _p_deactivate ran, in case of buggy subclasses.
# see _persistent_deactivate_ran
value
.
_p_deactivate
()
if
(
self
.
_persistent_deactivate_ran
# Test-cases sneak in non-Persistent objects, sigh, so naturally
# they don't cooperate (without this check a bunch of test_picklecache
# breaks)
or
not
isinstance
(
value
,
_SWEEPABLE_TYPES
)):
to_eject
.
append
((
i
,
value
))
self
.
non_ghost_count
-=
1
self
.
non_ghost_count
-=
1
node
=
node
.
next
ejected
=
len
(
to_eject
)
if
ejected
:
self
.
ring
.
delete_all
(
to_eject
)
del
to_eject
# Got to clear our local if we want the GC to get the weak refs
if
ejected
and
_SWEEP_NEEDS_GC
:
# See comments on _SWEEP_NEEDS_GC
gc
.
collect
()
return
ejected
@
_sweeping_ring
def
_invalidate
(
self
,
oid
):
def
_invalidate
(
self
,
oid
):
value
=
self
.
data
.
get
(
oid
)
value
=
self
.
data
.
get
(
oid
)
if
value
is
not
None
and
value
.
_p_state
!=
GHOST
:
if
value
is
not
None
and
value
.
_p_state
!=
GHOST
:
value
.
_p_invalidate
()
value
.
_p_invalidate
()
node
=
self
.
ring
.
next
was_in_ring
=
self
.
ring
.
delete
(
value
)
while
True
:
self
.
non_ghost_count
-=
1
if
node
is
self
.
ring
:
break
# pragma: no cover belt-and-suspenders
if
node
.
object
is
value
:
node
.
prev
.
next
,
node
.
next
.
prev
=
node
.
next
,
node
.
prev
break
node
=
node
.
next
elif
oid
in
self
.
persistent_classes
:
elif
oid
in
self
.
persistent_classes
:
del
self
.
persistent_classes
[
oid
]
persistent_class
=
self
.
persistent_classes
.
pop
(
oid
)
try
:
# ZODB.persistentclass.PersistentMetaClass objects
# have this method and it must be called for transaction abort
# and other forms of invalidation to work
persistent_class
.
_p_invalidate
()
except
AttributeError
:
pass
persistent/ring.py
0 → 100644
View file @
39c1f033
# -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (c) 2015 Zope Foundation and Contributors.
# All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
##############################################################################
#pylint: disable=W0212,E0211,W0622,E0213,W0221,E0239
from
zope.interface
import
Interface
from
zope.interface
import
implementer
class IRing(Interface):
    """Conceptually, a doubly-linked list for efficiently keeping track of least-
    and most-recently used :class:`persistent.interfaces.IPersistent` objects.

    This is meant to be used by the :class:`persistent.picklecache.PickleCache`
    and should not be considered a public API. This interface documentation exists
    to assist development of the picklecache and alternate implementations by
    explaining assumptions and performance requirements.
    """

    def __len__():
        """Return the number of persistent objects stored in the ring.

        Should be constant time.
        """

    def __contains__(object):
        """Answer whether the given persistent object is found in the ring.

        Must not rely on object equality or object hashing, but only
        identity or the `_p_oid`. Should be constant time.
        """

    def add(object):
        """Add the persistent object to the ring as most-recently used.

        When an object is in the ring, the ring holds a strong
        reference to it so it can be deactivated later by the pickle
        cache. Should be constant time.

        The object should not already be in the ring, but this is not necessarily
        enforced.
        """

    def delete(object):
        """Remove the object from the ring if it is present.

        Returns a true value if it was present and a false value
        otherwise. An ideal implementation should be constant time,
        but linear time is allowed.
        """

    def move_to_head(object):
        """Place the object as the most recently used object in the ring.

        The object should already be in the ring, but this is not
        necessarily enforced, and attempting to move an object that is
        not in the ring has undefined consequences. An ideal
        implementation should be constant time, but linear time is
        allowed.
        """

    def delete_all(indexes_and_values):
        """Given a sequence of pairs (index, object), remove all of them from
        the ring.

        This should be equivalent to calling :meth:`delete` for each
        value, but allows for a more efficient bulk deletion process.

        If the index and object pairs do not match with the actual state of the
        ring, this operation is undefined.

        Should be at least linear time (not quadratic).
        """

    def __iter__():
        """Iterate over each persistent object in the ring, in the order of least
        recently used to most recently used.

        Mutating the ring while an iteration is in progress has
        undefined consequences.
        """
from
collections
import
deque
@implementer(IRing)
class _DequeRing(object):
    """A ring backed by the :class:`collections.deque` class.

    Operations are a mix of constant and linear time.

    It is available on all platforms.
    """

    # ``ring`` holds the objects in LRU order (least-recent first);
    # ``ring_oids`` mirrors their _p_oid values for O(1) membership tests.
    __slots__ = ('ring', 'ring_oids')

    def __init__(self):
        self.ring = deque()
        self.ring_oids = set()

    def __len__(self):
        return len(self.ring)

    def __contains__(self, pobj):
        # Membership is judged by _p_oid, never by object equality.
        return pobj._p_oid in self.ring_oids

    def add(self, pobj):
        # Appending makes the object the most-recently-used entry.
        self.ring.append(pobj)
        self.ring_oids.add(pobj._p_oid)

    def delete(self, pobj):
        # Note that we do not use self.ring.remove() because that
        # uses equality semantics and we don't want to call the persistent
        # object's __eq__ method (which might wake it up just after we
        # tried to ghost it)
        index = 0
        for candidate in self.ring:
            if candidate is pobj:
                del self.ring[index]
                self.ring_oids.discard(pobj._p_oid)
                return 1
            index += 1

    def move_to_head(self, pobj):
        # Removing and re-adding places the object at the MRU end.
        self.delete(pobj)
        self.add(pobj)

    def delete_all(self, indexes_and_values):
        # Delete from the back forward so earlier indexes stay valid.
        for idx, value in reversed(indexes_and_values):
            del self.ring[idx]
            self.ring_oids.discard(value._p_oid)

    def __iter__(self):
        # deque iterates oldest-first, i.e. least-recently-used first.
        return iter(self.ring)
try:
    from cffi import FFI
except ImportError: # pragma: no cover
    # Without cffi the constant-time ring is unavailable; the
    # ``Ring`` export below falls back to the deque implementation.
    _CFFIRing = None
else:
    import os

    this_dir = os.path.dirname(os.path.abspath(__file__))

    # Build the C extension on the fly from the declarations in ring.h
    # and the implementation in ring.c that ship alongside this module.
    ffi = FFI()
    with open(os.path.join(this_dir, 'ring.h')) as f:
        ffi.cdef(f.read())

    _FFI_RING = ffi.verify("""
    #include "ring.c"
    """, include_dirs=[this_dir])

    _OGA = object.__getattribute__
    _OSA = object.__setattr__

    #pylint: disable=E1101
    @implementer(IRing)
    class _CFFIRing(object):
        """A ring backed by a C implementation. All operations are constant time.

        It is only available on platforms with ``cffi`` installed.
        """

        __slots__ = ('ring_home', 'ring_to_obj')

        def __init__(self):
            # The home node is its own neighbor in an empty ring.
            node = self.ring_home = ffi.new("CPersistentRing*")
            node.r_next = node
            node.r_prev = node

            # In order for the CFFI objects to stay alive, we must keep
            # a strong reference to them, otherwise they get freed. We must
            # also keep strong references to the objects so they can be deactivated
            self.ring_to_obj = dict()

        def __len__(self):
            return len(self.ring_to_obj)

        def __contains__(self, pobj):
            # ``self`` serves as a sentinel that can never be a dict key here.
            return getattr(pobj, '_Persistent__ring', self) in self.ring_to_obj

        def add(self, pobj):
            node = ffi.new("CPersistentRing*")
            _FFI_RING.ring_add(self.ring_home, node)
            self.ring_to_obj[node] = pobj
            _OSA(pobj, '_Persistent__ring', node)

        def delete(self, pobj):
            node = getattr(pobj, '_Persistent__ring', None)
            obj = self.ring_to_obj.pop(node, None)
            # Only unlink nodes that were actually tracked and still linked.
            if node is not None and obj is not None and node.r_next:
                _FFI_RING.ring_del(node)
                return 1

        def move_to_head(self, pobj):
            node = _OGA(pobj, '_Persistent__ring')
            _FFI_RING.ring_move_to_head(self.ring_home, node)

        def delete_all(self, indexes_and_values):
            # Indexes are irrelevant here; each node unlinks in O(1).
            for _, value in indexes_and_values:
                self.delete(value)

        def iteritems(self):
            # Walk the C ring from LRU to MRU, yielding raw nodes.
            head = self.ring_home
            cursor = head.r_next
            while cursor != head:
                yield cursor
                cursor = cursor.r_next

        def __iter__(self):
            node_map = self.ring_to_obj
            for node in self.iteritems():
                yield node_map[node]

# Export the best available implementation
Ring = _CFFIRing if _CFFIRing else _DequeRing
persistent/tests/test_persistence.py
View file @
39c1f033
...
@@ -18,10 +18,20 @@ import platform
...
@@ -18,10 +18,20 @@ import platform
import
sys
import
sys
py_impl
=
getattr
(
platform
,
'python_implementation'
,
lambda
:
None
)
py_impl
=
getattr
(
platform
,
'python_implementation'
,
lambda
:
None
)
_is_pypy3
=
py_impl
()
==
'PyPy'
and
sys
.
version_info
[
0
]
>
2
_is_pypy3
=
py_impl
()
==
'PyPy'
and
sys
.
version_info
[
0
]
>
2
_is_jython
=
py_impl
()
==
'Jython'
#pylint: disable=R0904,W0212,E1101
class
_Persistent_Base
(
object
):
class
_Persistent_Base
(
object
):
def
_getTargetClass
(
self
):
# concrete testcase classes must override
raise
NotImplementedError
()
def
_makeCache
(
self
,
jar
):
# concrete testcase classes must override
raise
NotImplementedError
()
def
_makeOne
(
self
,
*
args
,
**
kw
):
def
_makeOne
(
self
,
*
args
,
**
kw
):
return
self
.
_getTargetClass
()(
*
args
,
**
kw
)
return
self
.
_getTargetClass
()(
*
args
,
**
kw
)
...
@@ -31,11 +41,23 @@ class _Persistent_Base(object):
...
@@ -31,11 +41,23 @@ class _Persistent_Base(object):
@
implementer
(
IPersistentDataManager
)
@
implementer
(
IPersistentDataManager
)
class
_Jar
(
object
):
class
_Jar
(
object
):
_cache
=
None
# Set this to a value to have our `setstate`
# pass it through to the object's __setstate__
setstate_calls_object
=
None
# Set this to a value to have our `setstate`
# set the _p_serial of the object
setstate_sets_serial
=
None
def
__init__
(
self
):
def
__init__
(
self
):
self
.
_loaded
=
[]
self
.
_loaded
=
[]
self
.
_registered
=
[]
self
.
_registered
=
[]
def
setstate
(
self
,
obj
):
def
setstate
(
self
,
obj
):
self
.
_loaded
.
append
(
obj
.
_p_oid
)
self
.
_loaded
.
append
(
obj
.
_p_oid
)
if
self
.
setstate_calls_object
is
not
None
:
obj
.
__setstate__
(
self
.
setstate_calls_object
)
if
self
.
setstate_sets_serial
is
not
None
:
obj
.
_p_serial
=
self
.
setstate_sets_serial
def
register
(
self
,
obj
):
def
register
(
self
,
obj
):
self
.
_registered
.
append
(
obj
.
_p_oid
)
self
.
_registered
.
append
(
obj
.
_p_oid
)
...
@@ -112,12 +134,34 @@ class _Persistent_Base(object):
...
@@ -112,12 +134,34 @@ class _Persistent_Base(object):
del
inst
.
_p_jar
del
inst
.
_p_jar
self
.
assertEqual
(
inst
.
_p_jar
,
None
)
self
.
assertEqual
(
inst
.
_p_jar
,
None
)
def
test_del_jar_of_inactive_object_that_has_no_state
(
self
):
# If an object is ghosted, and we try to delete its
# jar, we shouldn't activate the object.
# Simulate a POSKeyError on _p_activate; this can happen aborting
# a transaction using ZEO
broken_jar
=
self
.
_makeBrokenJar
()
inst
=
self
.
_makeOne
()
inst
.
_p_oid
=
42
inst
.
_p_jar
=
broken_jar
# make it inactive
inst
.
_p_deactivate
()
self
.
assertEqual
(
inst
.
_p_status
,
"ghost"
)
# delete the jar; if we activated the object, the broken
# jar would raise NotImplementedError
del
inst
.
_p_jar
def
test_assign_p_jar_w_new_jar
(
self
):
def
test_assign_p_jar_w_new_jar
(
self
):
inst
,
jar
,
OID
=
self
.
_makeOneWithJar
()
inst
,
jar
,
OID
=
self
.
_makeOneWithJar
()
new_jar
=
self
.
_makeJar
()
new_jar
=
self
.
_makeJar
()
def
_test
()
:
try
:
inst
.
_p_jar
=
new_jar
inst
.
_p_jar
=
new_jar
self
.
assertRaises
(
ValueError
,
_test
)
except
ValueError
as
e
:
self
.
assertEqual
(
str
(
e
),
"can not change _p_jar of cached object"
)
else
:
self
.
fail
(
"Should raise ValueError"
)
def
test_assign_p_jar_w_valid_jar
(
self
):
def
test_assign_p_jar_w_valid_jar
(
self
):
jar
=
self
.
_makeJar
()
jar
=
self
.
_makeJar
()
...
@@ -127,11 +171,25 @@ class _Persistent_Base(object):
...
@@ -127,11 +171,25 @@ class _Persistent_Base(object):
self
.
assertTrue
(
inst
.
_p_jar
is
jar
)
self
.
assertTrue
(
inst
.
_p_jar
is
jar
)
inst
.
_p_jar
=
jar
# reassign only to same DM
inst
.
_p_jar
=
jar
# reassign only to same DM
def
test_assign_p_jar_not_in_cache_allowed
(
self
):
jar
=
self
.
_makeJar
()
inst
=
self
.
_makeOne
()
inst
.
_p_jar
=
jar
# Both of these are allowed
inst
.
_p_jar
=
self
.
_makeJar
()
inst
.
_p_jar
=
None
self
.
assertEqual
(
inst
.
_p_jar
,
None
)
def
test_assign_p_oid_w_invalid_oid
(
self
):
def
test_assign_p_oid_w_invalid_oid
(
self
):
inst
,
jar
,
OID
=
self
.
_makeOneWithJar
()
inst
,
jar
,
OID
=
self
.
_makeOneWithJar
()
def
_test
():
try
:
inst
.
_p_oid
=
object
()
inst
.
_p_oid
=
object
()
self
.
assertRaises
(
ValueError
,
_test
)
except
ValueError
as
e
:
self
.
assertEqual
(
str
(
e
),
'can not change _p_oid of cached object'
)
else
:
self
.
fail
(
"Should raise value error"
)
def
test_assign_p_oid_w_valid_oid
(
self
):
def
test_assign_p_oid_w_valid_oid
(
self
):
from
persistent.timestamp
import
_makeOctets
from
persistent.timestamp
import
_makeOctets
...
@@ -166,6 +224,14 @@ class _Persistent_Base(object):
...
@@ -166,6 +224,14 @@ class _Persistent_Base(object):
inst
.
_p_oid
=
new_OID
inst
.
_p_oid
=
new_OID
self
.
assertRaises
(
ValueError
,
_test
)
self
.
assertRaises
(
ValueError
,
_test
)
def
test_assign_p_oid_not_in_cache_allowed
(
self
):
jar
=
self
.
_makeJar
()
inst
=
self
.
_makeOne
()
inst
.
_p_jar
=
jar
inst
.
_p_oid
=
1
# anything goes
inst
.
_p_oid
=
42
self
.
assertEqual
(
inst
.
_p_oid
,
42
)
def
test_delete_p_oid_wo_jar
(
self
):
def
test_delete_p_oid_wo_jar
(
self
):
from
persistent.timestamp
import
_makeOctets
from
persistent.timestamp
import
_makeOctets
OID
=
_makeOctets
(
'
\
x01
'
*
8
)
OID
=
_makeOctets
(
'
\
x01
'
*
8
)
...
@@ -489,6 +555,18 @@ class _Persistent_Base(object):
...
@@ -489,6 +555,18 @@ class _Persistent_Base(object):
inst
.
_p_serial
=
ts
.
raw
()
inst
.
_p_serial
=
ts
.
raw
()
self
.
assertEqual
(
inst
.
_p_mtime
,
ts
.
timeTime
())
self
.
assertEqual
(
inst
.
_p_mtime
,
ts
.
timeTime
())
def
test__p_mtime_activates_object
(
self
):
# Accessing _p_mtime implicitly unghostifies the object
from
persistent.timestamp
import
TimeStamp
WHEN_TUPLE
=
(
2011
,
2
,
15
,
13
,
33
,
27.5
)
ts
=
TimeStamp
(
*
WHEN_TUPLE
)
inst
,
jar
,
OID
=
self
.
_makeOneWithJar
()
jar
.
setstate_sets_serial
=
ts
.
raw
()
inst
.
_p_invalidate
()
self
.
assertEqual
(
inst
.
_p_status
,
'ghost'
)
self
.
assertEqual
(
inst
.
_p_mtime
,
ts
.
timeTime
())
self
.
assertEqual
(
inst
.
_p_status
,
'saved'
)
def
test__p_state_unsaved
(
self
):
def
test__p_state_unsaved
(
self
):
inst
=
self
.
_makeOne
()
inst
=
self
.
_makeOne
()
inst
.
_p_changed
=
True
inst
.
_p_changed
=
True
...
@@ -575,7 +653,6 @@ class _Persistent_Base(object):
...
@@ -575,7 +653,6 @@ class _Persistent_Base(object):
'_p_oid'
,
'_p_oid'
,
'_p_changed'
,
'_p_changed'
,
'_p_serial'
,
'_p_serial'
,
'_p_mtime'
,
'_p_state'
,
'_p_state'
,
'_p_estimated_size'
,
'_p_estimated_size'
,
'_p_sticky'
,
'_p_sticky'
,
...
@@ -586,6 +663,9 @@ class _Persistent_Base(object):
...
@@ -586,6 +663,9 @@ class _Persistent_Base(object):
for
name
in
NAMES
:
for
name
in
NAMES
:
getattr
(
inst
,
name
)
getattr
(
inst
,
name
)
self
.
_checkMRU
(
jar
,
[])
self
.
_checkMRU
(
jar
,
[])
# _p_mtime is special, it activates the object
getattr
(
inst
,
'_p_mtime'
)
self
.
_checkMRU
(
jar
,
[
OID
])
def
test___getattribute__special_name
(
self
):
def
test___getattribute__special_name
(
self
):
from
persistent.persistence
import
SPECIAL_NAMES
from
persistent.persistence
import
SPECIAL_NAMES
...
@@ -628,6 +708,24 @@ class _Persistent_Base(object):
...
@@ -628,6 +708,24 @@ class _Persistent_Base(object):
self
.
assertEqual
(
getattr
(
inst
,
'normal'
,
None
),
'value'
)
self
.
assertEqual
(
getattr
(
inst
,
'normal'
,
None
),
'value'
)
self
.
_checkMRU
(
jar
,
[
OID
])
self
.
_checkMRU
(
jar
,
[
OID
])
def
test___getattribute___non_cooperative
(
self
):
# Getting attributes is NOT cooperative with the superclass.
# This comes from the C implementation and is maintained
# for backwards compatibility. (For example, Persistent and
# ExtensionClass.Base/Acquisition take special care to mix together.)
class
Base
(
object
):
def
__getattribute__
(
self
,
name
):
if
name
==
'magic'
:
return
42
return
super
(
Base
,
self
).
__getattribute__
(
name
)
self
.
assertEqual
(
getattr
(
Base
(),
'magic'
),
42
)
class
Derived
(
self
.
_getTargetClass
(),
Base
):
pass
self
.
assertRaises
(
AttributeError
,
getattr
,
Derived
(),
'magic'
)
def
test___setattr___p__names
(
self
):
def
test___setattr___p__names
(
self
):
from
persistent.timestamp
import
_makeOctets
from
persistent.timestamp
import
_makeOctets
SERIAL
=
_makeOctets
(
'
\
x01
'
*
8
)
SERIAL
=
_makeOctets
(
'
\
x01
'
*
8
)
...
@@ -869,7 +967,7 @@ class _Persistent_Base(object):
...
@@ -869,7 +967,7 @@ class _Persistent_Base(object):
self
.
assertEqual
(
inst
.
baz
,
'bam'
)
self
.
assertEqual
(
inst
.
baz
,
'bam'
)
self
.
assertEqual
(
inst
.
qux
,
'spam'
)
self
.
assertEqual
(
inst
.
qux
,
'spam'
)
if
not
_is_pypy3
:
if
not
_is_pypy3
and
not
_is_jython
:
def
test___setstate___interns_dict_keys
(
self
):
def
test___setstate___interns_dict_keys
(
self
):
class
Derived
(
self
.
_getTargetClass
()):
class
Derived
(
self
.
_getTargetClass
()):
pass
pass
...
@@ -884,6 +982,19 @@ class _Persistent_Base(object):
...
@@ -884,6 +982,19 @@ class _Persistent_Base(object):
key2
=
list
(
inst2
.
__dict__
.
keys
())[
0
]
key2
=
list
(
inst2
.
__dict__
.
keys
())[
0
]
self
.
assertTrue
(
key1
is
key2
)
self
.
assertTrue
(
key1
is
key2
)
def
test___setstate___doesnt_fail_on_non_string_keys
(
self
):
class
Derived
(
self
.
_getTargetClass
()):
pass
inst1
=
Derived
()
inst1
.
__setstate__
({
1
:
2
})
self
.
assertTrue
(
1
in
inst1
.
__dict__
)
class
MyStr
(
str
):
pass
mystr
=
MyStr
(
'mystr'
)
inst1
.
__setstate__
({
mystr
:
2
})
self
.
assertTrue
(
mystr
in
inst1
.
__dict__
)
def
test___reduce__
(
self
):
def
test___reduce__
(
self
):
from
persistent._compat
import
copy_reg
from
persistent._compat
import
copy_reg
inst
=
self
.
_makeOne
()
inst
=
self
.
_makeOne
()
...
@@ -1025,6 +1136,32 @@ class _Persistent_Base(object):
...
@@ -1025,6 +1136,32 @@ class _Persistent_Base(object):
inst
.
_p_activate
()
inst
.
_p_activate
()
self
.
assertEqual
(
list
(
jar
.
_loaded
),
[
OID
])
self
.
assertEqual
(
list
(
jar
.
_loaded
),
[
OID
])
def
test__p_activate_leaves_object_in_saved_even_if_object_mutated_self
(
self
):
# If the object's __setstate__ set's attributes
# when called by p_activate, the state is still
# 'saved' when done. Furthemore, the object is not
# registered with the jar
class
WithSetstate
(
self
.
_getTargetClass
()):
state
=
None
def
__setstate__
(
self
,
state
):
self
.
state
=
state
inst
,
jar
,
OID
=
self
.
_makeOneWithJar
(
klass
=
WithSetstate
)
inst
.
_p_invalidate
()
# make it a ghost
self
.
assertEqual
(
inst
.
_p_status
,
'ghost'
)
jar
.
setstate_calls_object
=
42
inst
.
_p_activate
()
# It get loaded
self
.
assertEqual
(
list
(
jar
.
_loaded
),
[
OID
])
# and __setstate__ got called to mutate the object
self
.
assertEqual
(
inst
.
state
,
42
)
# but it's still in the saved state
self
.
assertEqual
(
inst
.
_p_status
,
'saved'
)
# and it is not registered as changed by the jar
self
.
assertEqual
(
list
(
jar
.
_registered
),
[])
def
test__p_deactivate_from_unsaved
(
self
):
def
test__p_deactivate_from_unsaved
(
self
):
inst
=
self
.
_makeOne
()
inst
=
self
.
_makeOne
()
inst
.
_p_deactivate
()
inst
.
_p_deactivate
()
...
@@ -1381,6 +1518,36 @@ class _Persistent_Base(object):
...
@@ -1381,6 +1518,36 @@ class _Persistent_Base(object):
inst
=
subclass
()
inst
=
subclass
()
self
.
assertEqual
(
object
.
__getattribute__
(
inst
,
'_v_setattr_called'
),
False
)
self
.
assertEqual
(
object
.
__getattribute__
(
inst
,
'_v_setattr_called'
),
False
)
def
test_can_set__p_attrs_if_subclass_denies_setattr
(
self
):
from
persistent._compat
import
_b
# ZODB defines a PersistentBroken subclass that only lets us
# set things that start with _p, so make sure we can do that
class
Broken
(
self
.
_getTargetClass
()):
def
__setattr__
(
self
,
name
,
value
):
if
name
.
startswith
(
'_p_'
):
super
(
Broken
,
self
).
__setattr__
(
name
,
value
)
else
:
raise
TypeError
(
"Can't change broken objects"
)
KEY
=
_b
(
'123'
)
jar
=
self
.
_makeJar
()
broken
=
Broken
()
broken
.
_p_oid
=
KEY
broken
.
_p_jar
=
jar
broken
.
_p_changed
=
True
broken
.
_p_changed
=
0
def
test_p_invalidate_calls_p_deactivate
(
self
):
class
P
(
self
.
_getTargetClass
()):
deactivated
=
False
def
_p_deactivate
(
self
):
self
.
deactivated
=
True
p
=
P
()
p
.
_p_invalidate
()
self
.
assertTrue
(
p
.
deactivated
)
class
PyPersistentTests
(
unittest
.
TestCase
,
_Persistent_Base
):
class
PyPersistentTests
(
unittest
.
TestCase
,
_Persistent_Base
):
def
_getTargetClass
(
self
):
def
_getTargetClass
(
self
):
...
@@ -1404,6 +1571,8 @@ class PyPersistentTests(unittest.TestCase, _Persistent_Base):
...
@@ -1404,6 +1571,8 @@ class PyPersistentTests(unittest.TestCase, _Persistent_Base):
return
self
.
_data
.
get
(
oid
)
return
self
.
_data
.
get
(
oid
)
def
__delitem__
(
self
,
oid
):
def
__delitem__
(
self
,
oid
):
del
self
.
_data
[
oid
]
del
self
.
_data
[
oid
]
def
update_object_size_estimation
(
self
,
oid
,
new_size
):
pass
return
_Cache
(
jar
)
return
_Cache
(
jar
)
...
@@ -1435,6 +1604,58 @@ class PyPersistentTests(unittest.TestCase, _Persistent_Base):
...
@@ -1435,6 +1604,58 @@ class PyPersistentTests(unittest.TestCase, _Persistent_Base):
c1
.
_p_accessed
()
c1
.
_p_accessed
()
self
.
_checkMRU
(
jar
,
[])
self
.
_checkMRU
(
jar
,
[])
def
test_accessed_invalidated_with_jar_and_oid_but_no_cache
(
self
):
# This scenario arises in ZODB tests where the jar is faked
from
persistent._compat
import
_b
KEY
=
_b
(
'123'
)
class
Jar
(
object
):
accessed
=
False
def
__getattr__
(
self
,
name
):
if
name
==
'_cache'
:
self
.
accessed
=
True
raise
AttributeError
(
name
)
def
register
(
self
,
*
args
):
pass
c1
=
self
.
_makeOne
()
c1
.
_p_oid
=
KEY
c1
.
_p_jar
=
Jar
()
c1
.
_p_changed
=
True
self
.
assertEqual
(
c1
.
_p_state
,
1
)
c1
.
_p_accessed
()
self
.
assertTrue
(
c1
.
_p_jar
.
accessed
)
c1
.
_p_jar
.
accessed
=
False
c1
.
_p_invalidate_deactivate_helper
()
self
.
assertTrue
(
c1
.
_p_jar
.
accessed
)
c1
.
_p_jar
.
accessed
=
False
c1
.
_Persistent__flags
=
None
# coverage
c1
.
_p_invalidate_deactivate_helper
()
self
.
assertTrue
(
c1
.
_p_jar
.
accessed
)
def
test_p_activate_with_jar_without_oid
(
self
):
# Works, but nothing happens
inst
=
self
.
_makeOne
()
inst
.
_p_jar
=
object
()
inst
.
_p_oid
=
None
object
.
__setattr__
(
inst
,
'_Persistent__flags'
,
None
)
inst
.
_p_activate
()
def
test_p_accessed_with_jar_without_oid
(
self
):
# Works, but nothing happens
inst
=
self
.
_makeOne
()
inst
.
_p_jar
=
object
()
inst
.
_p_accessed
()
def
test_p_accessed_with_jar_with_oid_as_ghost
(
self
):
# Works, but nothing happens
inst
=
self
.
_makeOne
()
inst
.
_p_jar
=
object
()
inst
.
_p_oid
=
42
inst
.
_Persistent__flags
=
None
inst
.
_p_accessed
()
_add_to_suite
=
[
PyPersistentTests
]
_add_to_suite
=
[
PyPersistentTests
]
if
not
os
.
environ
.
get
(
'PURE_PYTHON'
):
if
not
os
.
environ
.
get
(
'PURE_PYTHON'
):
...
...
persistent/tests/test_picklecache.py
View file @
39c1f033
...
@@ -11,12 +11,33 @@
...
@@ -11,12 +11,33 @@
# FOR A PARTICULAR PURPOSE.
# FOR A PARTICULAR PURPOSE.
#
#
##############################################################################
##############################################################################
import
gc
import
os
import
platform
import
sys
import
unittest
import
unittest
_py_impl
=
getattr
(
platform
,
'python_implementation'
,
lambda
:
None
)
_is_pypy
=
_py_impl
()
==
'PyPy'
_is_jython
=
'java'
in
sys
.
platform
_marker
=
object
()
_marker
=
object
()
class
PickleCacheTests
(
unittest
.
TestCase
):
class
PickleCacheTests
(
unittest
.
TestCase
):
def
setUp
(
self
):
import
persistent.picklecache
self
.
orig_types
=
persistent
.
picklecache
.
_CACHEABLE_TYPES
persistent
.
picklecache
.
_CACHEABLE_TYPES
+=
(
DummyPersistent
,)
self
.
orig_sweep_gc
=
persistent
.
picklecache
.
_SWEEP_NEEDS_GC
persistent
.
picklecache
.
_SWEEP_NEEDS_GC
=
True
# coverage
def
tearDown
(
self
):
import
persistent.picklecache
persistent
.
picklecache
.
_CACHEABLE_TYPES
=
self
.
orig_types
persistent
.
picklecache
.
_SWEEP_NEEDS_GC
=
self
.
orig_sweep_gc
def
_getTargetClass
(
self
):
def
_getTargetClass
(
self
):
from
persistent.picklecache
import
PickleCache
from
persistent.picklecache
import
PickleCache
return
PickleCache
return
PickleCache
...
@@ -79,12 +100,12 @@ class PickleCacheTests(unittest.TestCase):
...
@@ -79,12 +100,12 @@ class PickleCacheTests(unittest.TestCase):
self
.
assertTrue
(
cache
.
get
(
'nonesuch'
,
default
)
is
default
)
self
.
assertTrue
(
cache
.
get
(
'nonesuch'
,
default
)
is
default
)
def
test___setitem___non_string_oid_raises_
Valu
eError
(
self
):
def
test___setitem___non_string_oid_raises_
Typ
eError
(
self
):
cache
=
self
.
_makeOne
()
cache
=
self
.
_makeOne
()
try
:
try
:
cache
[
object
()]
=
self
.
_makePersist
()
cache
[
object
()]
=
self
.
_makePersist
()
except
Valu
eError
:
except
Typ
eError
:
pass
pass
else
:
else
:
self
.
fail
(
"Didn't raise ValueError with non-string OID."
)
self
.
fail
(
"Didn't raise ValueError with non-string OID."
)
...
@@ -93,21 +114,21 @@ class PickleCacheTests(unittest.TestCase):
...
@@ -93,21 +114,21 @@ class PickleCacheTests(unittest.TestCase):
from
persistent._compat
import
_b
from
persistent._compat
import
_b
KEY
=
_b
(
'original'
)
KEY
=
_b
(
'original'
)
cache
=
self
.
_makeOne
()
cache
=
self
.
_makeOne
()
original
=
self
.
_makePersist
()
original
=
self
.
_makePersist
(
oid
=
KEY
)
cache
[
KEY
]
=
original
cache
[
KEY
]
=
original
cache
[
KEY
]
=
original
cache
[
KEY
]
=
original
def
test___setitem___duplicate_oid_raises_
Key
Error
(
self
):
def
test___setitem___duplicate_oid_raises_
Value
Error
(
self
):
from
persistent._compat
import
_b
from
persistent._compat
import
_b
KEY
=
_b
(
'original'
)
KEY
=
_b
(
'original'
)
cache
=
self
.
_makeOne
()
cache
=
self
.
_makeOne
()
original
=
self
.
_makePersist
()
original
=
self
.
_makePersist
(
oid
=
KEY
)
cache
[
KEY
]
=
original
cache
[
KEY
]
=
original
duplicate
=
self
.
_makePersist
()
duplicate
=
self
.
_makePersist
(
oid
=
KEY
)
try
:
try
:
cache
[
KEY
]
=
duplicate
cache
[
KEY
]
=
duplicate
except
Key
Error
:
except
Value
Error
:
pass
pass
else
:
else
:
self
.
fail
(
"Didn't raise KeyError with duplicate OID."
)
self
.
fail
(
"Didn't raise KeyError with duplicate OID."
)
...
@@ -117,7 +138,7 @@ class PickleCacheTests(unittest.TestCase):
...
@@ -117,7 +138,7 @@ class PickleCacheTests(unittest.TestCase):
from
persistent._compat
import
_b
from
persistent._compat
import
_b
KEY
=
_b
(
'ghost'
)
KEY
=
_b
(
'ghost'
)
cache
=
self
.
_makeOne
()
cache
=
self
.
_makeOne
()
ghost
=
self
.
_makePersist
(
state
=
GHOST
)
ghost
=
self
.
_makePersist
(
state
=
GHOST
,
oid
=
KEY
)
cache
[
KEY
]
=
ghost
cache
[
KEY
]
=
ghost
...
@@ -130,13 +151,28 @@ class PickleCacheTests(unittest.TestCase):
...
@@ -130,13 +151,28 @@ class PickleCacheTests(unittest.TestCase):
self
.
assertTrue
(
items
[
0
][
1
]
is
ghost
)
self
.
assertTrue
(
items
[
0
][
1
]
is
ghost
)
self
.
assertTrue
(
cache
[
KEY
]
is
ghost
)
self
.
assertTrue
(
cache
[
KEY
]
is
ghost
)
def
test___setitem___
non_ghost
(
self
):
def
test___setitem___
mismatch_key_oid
(
self
):
from
persistent.interfaces
import
UPTODATE
from
persistent.interfaces
import
UPTODATE
from
persistent._compat
import
_b
from
persistent._compat
import
_b
KEY
=
_b
(
'uptodate'
)
KEY
=
_b
(
'uptodate'
)
cache
=
self
.
_makeOne
()
cache
=
self
.
_makeOne
()
uptodate
=
self
.
_makePersist
(
state
=
UPTODATE
)
uptodate
=
self
.
_makePersist
(
state
=
UPTODATE
)
try
:
cache
[
KEY
]
=
uptodate
except
ValueError
:
pass
else
:
self
.
fail
(
"Didn't raise ValueError when the key didn't match the OID"
)
def
test___setitem___non_ghost
(
self
):
from
persistent.interfaces
import
UPTODATE
from
persistent._compat
import
_b
KEY
=
_b
(
'uptodate'
)
cache
=
self
.
_makeOne
()
uptodate
=
self
.
_makePersist
(
state
=
UPTODATE
,
oid
=
KEY
)
cache
[
KEY
]
=
uptodate
cache
[
KEY
]
=
uptodate
self
.
assertEqual
(
len
(
cache
),
1
)
self
.
assertEqual
(
len
(
cache
),
1
)
...
@@ -153,7 +189,7 @@ class PickleCacheTests(unittest.TestCase):
...
@@ -153,7 +189,7 @@ class PickleCacheTests(unittest.TestCase):
from
persistent._compat
import
_b
from
persistent._compat
import
_b
KEY
=
_b
(
'pclass'
)
KEY
=
_b
(
'pclass'
)
class
pclass
(
object
):
class
pclass
(
object
):
pass
_p_oid
=
KEY
cache
=
self
.
_makeOne
()
cache
=
self
.
_makeOne
()
cache
[
KEY
]
=
pclass
cache
[
KEY
]
=
pclass
...
@@ -167,12 +203,12 @@ class PickleCacheTests(unittest.TestCase):
...
@@ -167,12 +203,12 @@ class PickleCacheTests(unittest.TestCase):
self
.
assertTrue
(
cache
[
KEY
]
is
pclass
)
self
.
assertTrue
(
cache
[
KEY
]
is
pclass
)
self
.
assertTrue
(
cache
.
get
(
KEY
)
is
pclass
)
self
.
assertTrue
(
cache
.
get
(
KEY
)
is
pclass
)
def
test___delitem___non_string_oid_raises_
Valu
eError
(
self
):
def
test___delitem___non_string_oid_raises_
Typ
eError
(
self
):
cache
=
self
.
_makeOne
()
cache
=
self
.
_makeOne
()
try
:
try
:
del
cache
[
object
()]
del
cache
[
object
()]
except
Valu
eError
:
except
Typ
eError
:
pass
pass
else
:
else
:
self
.
fail
(
"Didn't raise ValueError with non-string OID."
)
self
.
fail
(
"Didn't raise ValueError with non-string OID."
)
...
@@ -194,7 +230,7 @@ class PickleCacheTests(unittest.TestCase):
...
@@ -194,7 +230,7 @@ class PickleCacheTests(unittest.TestCase):
KEY
=
_b
(
'pclass'
)
KEY
=
_b
(
'pclass'
)
cache
=
self
.
_makeOne
()
cache
=
self
.
_makeOne
()
class
pclass
(
object
):
class
pclass
(
object
):
pass
_p_oid
=
KEY
cache
=
self
.
_makeOne
()
cache
=
self
.
_makeOne
()
cache
[
KEY
]
=
pclass
cache
[
KEY
]
=
pclass
...
@@ -208,7 +244,7 @@ class PickleCacheTests(unittest.TestCase):
...
@@ -208,7 +244,7 @@ class PickleCacheTests(unittest.TestCase):
from
persistent._compat
import
_b
from
persistent._compat
import
_b
KEY
=
_b
(
'uptodate'
)
KEY
=
_b
(
'uptodate'
)
cache
=
self
.
_makeOne
()
cache
=
self
.
_makeOne
()
uptodate
=
self
.
_makePersist
(
state
=
UPTODATE
)
uptodate
=
self
.
_makePersist
(
state
=
UPTODATE
,
oid
=
KEY
)
cache
[
KEY
]
=
uptodate
cache
[
KEY
]
=
uptodate
...
@@ -219,9 +255,9 @@ class PickleCacheTests(unittest.TestCase):
...
@@ -219,9 +255,9 @@ class PickleCacheTests(unittest.TestCase):
from
persistent.interfaces
import
GHOST
from
persistent.interfaces
import
GHOST
from
persistent._compat
import
_b
from
persistent._compat
import
_b
cache
=
self
.
_makeOne
()
cache
=
self
.
_makeOne
()
ghost
=
self
.
_makePersist
(
state
=
GHOST
)
KEY
=
_b
(
'ghost'
)
KEY
=
_b
(
'ghost'
)
ghost
=
self
.
_makePersist
(
state
=
GHOST
,
oid
=
KEY
)
cache
[
KEY
]
=
ghost
cache
[
KEY
]
=
ghost
del
cache
[
KEY
]
del
cache
[
KEY
]
...
@@ -231,11 +267,11 @@ class PickleCacheTests(unittest.TestCase):
...
@@ -231,11 +267,11 @@ class PickleCacheTests(unittest.TestCase):
from
persistent.interfaces
import
UPTODATE
from
persistent.interfaces
import
UPTODATE
from
persistent._compat
import
_b
from
persistent._compat
import
_b
cache
=
self
.
_makeOne
()
cache
=
self
.
_makeOne
()
remains
=
self
.
_makePersist
(
state
=
UPTODATE
)
uptodate
=
self
.
_makePersist
(
state
=
UPTODATE
)
REMAINS
=
_b
(
'remains'
)
REMAINS
=
_b
(
'remains'
)
UPTODATE
=
_b
(
'uptodate'
)
UPTODATE
=
_b
(
'uptodate'
)
remains
=
self
.
_makePersist
(
state
=
UPTODATE
,
oid
=
REMAINS
)
uptodate
=
self
.
_makePersist
(
state
=
UPTODATE
,
oid
=
UPTODATE
)
cache
[
REMAINS
]
=
remains
cache
[
REMAINS
]
=
remains
cache
[
UPTODATE
]
=
uptodate
cache
[
UPTODATE
]
=
uptodate
...
@@ -423,7 +459,7 @@ class PickleCacheTests(unittest.TestCase):
...
@@ -423,7 +459,7 @@ class PickleCacheTests(unittest.TestCase):
from
persistent._compat
import
_b
from
persistent._compat
import
_b
cache
=
self
.
_makeOne
()
cache
=
self
.
_makeOne
()
cache
.
drain_resistance
=
2
cache
.
drain_resistance
=
2
cache
.
target
_size
=
90
cache
.
cache
_size
=
90
oids
=
[]
oids
=
[]
for
i
in
range
(
100
):
for
i
in
range
(
100
):
oid
=
_b
(
'oid_%04d'
%
i
)
oid
=
_b
(
'oid_%04d'
%
i
)
...
@@ -451,7 +487,6 @@ class PickleCacheTests(unittest.TestCase):
...
@@ -451,7 +487,6 @@ class PickleCacheTests(unittest.TestCase):
gc
.
collect
()
# banish the ghosts who are no longer in the ring
gc
.
collect
()
# banish the ghosts who are no longer in the ring
self
.
assertEqual
(
cache
.
cache_non_ghost_count
,
0
)
self
.
assertEqual
(
cache
.
cache_non_ghost_count
,
0
)
self
.
assertTrue
(
cache
.
ring
.
next
is
cache
.
ring
)
for
oid
in
oids
:
for
oid
in
oids
:
self
.
assertTrue
(
cache
.
get
(
oid
)
is
None
)
self
.
assertTrue
(
cache
.
get
(
oid
)
is
None
)
...
@@ -474,7 +509,6 @@ class PickleCacheTests(unittest.TestCase):
...
@@ -474,7 +509,6 @@ class PickleCacheTests(unittest.TestCase):
gc
.
collect
()
# banish the ghosts who are no longer in the ring
gc
.
collect
()
# banish the ghosts who are no longer in the ring
self
.
assertEqual
(
cache
.
cache_non_ghost_count
,
1
)
self
.
assertEqual
(
cache
.
cache_non_ghost_count
,
1
)
self
.
assertTrue
(
cache
.
ring
.
next
is
not
cache
.
ring
)
self
.
assertTrue
(
cache
.
get
(
oids
[
0
])
is
not
None
)
self
.
assertTrue
(
cache
.
get
(
oids
[
0
])
is
not
None
)
for
oid
in
oids
[
1
:]:
for
oid
in
oids
[
1
:]:
...
@@ -498,7 +532,6 @@ class PickleCacheTests(unittest.TestCase):
...
@@ -498,7 +532,6 @@ class PickleCacheTests(unittest.TestCase):
gc
.
collect
()
# banish the ghosts who are no longer in the ring
gc
.
collect
()
# banish the ghosts who are no longer in the ring
self
.
assertEqual
(
cache
.
cache_non_ghost_count
,
1
)
self
.
assertEqual
(
cache
.
cache_non_ghost_count
,
1
)
self
.
assertTrue
(
cache
.
ring
.
next
is
not
cache
.
ring
)
self
.
assertTrue
(
cache
.
get
(
oids
[
0
])
is
not
None
)
self
.
assertTrue
(
cache
.
get
(
oids
[
0
])
is
not
None
)
for
oid
in
oids
[
1
:]:
for
oid
in
oids
[
1
:]:
...
@@ -524,6 +557,23 @@ class PickleCacheTests(unittest.TestCase):
...
@@ -524,6 +557,23 @@ class PickleCacheTests(unittest.TestCase):
for
oid
in
oids
:
for
oid
in
oids
:
self
.
assertTrue
(
cache
.
get
(
oid
)
is
None
)
self
.
assertTrue
(
cache
.
get
(
oid
)
is
None
)
    def test_minimize_turns_into_ghosts(self):
        """``minimize()`` must eject a non-ghost entry and ghostify the object."""
        import gc
        from persistent.interfaces import UPTODATE
        from persistent.interfaces import GHOST
        from persistent._compat import _b
        cache = self._makeOne()
        oid = _b('oid_%04d' % 1)
        obj = cache[oid] = self._makePersist(oid=oid, state=UPTODATE)
        self.assertEqual(cache.cache_non_ghost_count, 1)
        cache.minimize()
        gc.collect() # banish the ghosts who are no longer in the ring
        # The entry left the non-ghost ring, and the object itself
        # was flipped to the GHOST state.
        self.assertEqual(cache.cache_non_ghost_count, 0)
        self.assertEqual(obj._p_state, GHOST)
def
test_new_ghost_non_persistent_object
(
self
):
def
test_new_ghost_non_persistent_object
(
self
):
from
persistent._compat
import
_b
from
persistent._compat
import
_b
cache
=
self
.
_makeOne
()
cache
=
self
.
_makeOne
()
...
@@ -549,8 +599,17 @@ class PickleCacheTests(unittest.TestCase):
...
@@ -549,8 +599,17 @@ class PickleCacheTests(unittest.TestCase):
from
persistent._compat
import
_b
from
persistent._compat
import
_b
KEY
=
_b
(
'123'
)
KEY
=
_b
(
'123'
)
cache
=
self
.
_makeOne
()
cache
=
self
.
_makeOne
()
candidate
=
self
.
_makePersist
(
oid
=
None
,
jar
=
None
)
candidate
=
self
.
_makePersist
(
oid
=
KEY
)
cache
[
KEY
]
=
candidate
cache
[
KEY
]
=
candidate
# Now, normally we can't get in the cache without an oid and jar
# (the C implementation doesn't allow it), so if we try to create
# a ghost, we get the value error
self
.
assertRaises
(
ValueError
,
cache
.
new_ghost
,
KEY
,
candidate
)
candidate
.
_p_oid
=
None
self
.
assertRaises
(
ValueError
,
cache
.
new_ghost
,
KEY
,
candidate
)
# if we're sneaky and remove the OID and jar, then we get the duplicate
# key error
candidate
.
_p_jar
=
None
self
.
assertRaises
(
KeyError
,
cache
.
new_ghost
,
KEY
,
candidate
)
self
.
assertRaises
(
KeyError
,
cache
.
new_ghost
,
KEY
,
candidate
)
def
test_new_ghost_success_already_ghost
(
self
):
def
test_new_ghost_success_already_ghost
(
self
):
...
@@ -740,7 +799,7 @@ class PickleCacheTests(unittest.TestCase):
...
@@ -740,7 +799,7 @@ class PickleCacheTests(unittest.TestCase):
from
persistent._compat
import
_b
from
persistent._compat
import
_b
KEY
=
_b
(
'123'
)
KEY
=
_b
(
'123'
)
class
Pclass
(
object
):
class
Pclass
(
object
):
_p_oid
=
None
_p_oid
=
KEY
_p_jar
=
None
_p_jar
=
None
cache
=
self
.
_makeOne
()
cache
=
self
.
_makeOne
()
cache
[
KEY
]
=
Pclass
cache
[
KEY
]
=
Pclass
...
@@ -754,7 +813,7 @@ class PickleCacheTests(unittest.TestCase):
...
@@ -754,7 +813,7 @@ class PickleCacheTests(unittest.TestCase):
from
persistent._compat
import
_b
from
persistent._compat
import
_b
KEY
=
_b
(
'pclass'
)
KEY
=
_b
(
'pclass'
)
class
pclass
(
object
):
class
pclass
(
object
):
pass
_p_oid
=
KEY
cache
=
self
.
_makeOne
()
cache
=
self
.
_makeOne
()
pclass
.
_p_state
=
UPTODATE
pclass
.
_p_state
=
UPTODATE
cache
[
KEY
]
=
pclass
cache
[
KEY
]
=
pclass
...
@@ -775,7 +834,7 @@ class PickleCacheTests(unittest.TestCase):
...
@@ -775,7 +834,7 @@ class PickleCacheTests(unittest.TestCase):
from
persistent._compat
import
_b
from
persistent._compat
import
_b
KEY
=
_b
(
'uptodate'
)
KEY
=
_b
(
'uptodate'
)
cache
=
self
.
_makeOne
()
cache
=
self
.
_makeOne
()
uptodate
=
self
.
_makePersist
(
state
=
UPTODATE
)
uptodate
=
self
.
_makePersist
(
state
=
UPTODATE
,
oid
=
KEY
)
cache
[
KEY
]
=
uptodate
cache
[
KEY
]
=
uptodate
gc
.
collect
()
# pypy vs. refcounting
gc
.
collect
()
# pypy vs. refcounting
...
@@ -795,7 +854,7 @@ class PickleCacheTests(unittest.TestCase):
...
@@ -795,7 +854,7 @@ class PickleCacheTests(unittest.TestCase):
from
persistent._compat
import
_b
from
persistent._compat
import
_b
KEY
=
_b
(
'ghost'
)
KEY
=
_b
(
'ghost'
)
cache
=
self
.
_makeOne
()
cache
=
self
.
_makeOne
()
ghost
=
self
.
_makePersist
(
state
=
GHOST
)
ghost
=
self
.
_makePersist
(
state
=
GHOST
,
oid
=
KEY
)
cache
[
KEY
]
=
ghost
cache
[
KEY
]
=
ghost
gc
.
collect
()
# pypy vs. refcounting
gc
.
collect
()
# pypy vs. refcounting
...
@@ -808,6 +867,201 @@ class PickleCacheTests(unittest.TestCase):
...
@@ -808,6 +867,201 @@ class PickleCacheTests(unittest.TestCase):
self
.
assertEqual
(
typ
,
'DummyPersistent'
)
self
.
assertEqual
(
typ
,
'DummyPersistent'
)
self
.
assertEqual
(
state
,
GHOST
)
self
.
assertEqual
(
state
,
GHOST
)
    def test_init_with_cacheless_jar(self):
        # Sometimes ZODB tests pass objects that don't
        # have a _cache; constructing the cache must tolerate the
        # AttributeError raised when it tries to set ``jar._cache``.
        class Jar(object):
            # Records whether the cache attempted the assignment.
            was_set = False
            def __setattr__(self, name, value):
                if name == '_cache':
                    object.__setattr__(self, 'was_set', True)
                # Refuse every attribute set, mimicking a cacheless jar.
                raise AttributeError(name)

        jar = Jar()
        self._makeOne(jar)
        # The cache tried (and was allowed to fail) to register itself.
        self.assertTrue(jar.was_set)
    def test_setting_non_persistent_item(self):
        """Storing a non-persistent value must raise TypeError with the C message."""
        cache = self._makeOne()
        try:
            cache[None] = object()
        except TypeError as e:
            # Message must match the C implementation's wording.
            self.assertEqual(str(e), "Cache values must be persistent objects.")
        else:
            self.fail("Should raise TypeError")
    def test_setting_without_jar(self):
        """An object with no ``_p_jar`` cannot be cached: ValueError."""
        cache = self._makeOne()
        p = self._makePersist(jar=None)
        try:
            cache[p._p_oid] = p
        except ValueError as e:
            self.assertEqual(str(e), "Cached object jar missing")
        else:
            self.fail("Should raise ValueError")
    def test_setting_already_cached(self):
        """An object already registered with one cache is rejected by another."""
        cache1 = self._makeOne()
        p = self._makePersist(jar=cache1.jar)
        cache1[p._p_oid] = p
        cache2 = self._makeOne()
        try:
            cache2[p._p_oid] = p
        except ValueError as e:
            self.assertEqual(str(e), "Object already in another cache")
        else:
            self.fail("Should raise value error")
    def test_cannot_update_mru_while_already_locked(self):
        """``mru()`` is a no-op (returns falsey) while a ring sweep is running."""
        cache = self._makeOne()
        # Simulate a sweep in progress by setting the internal lock flag.
        cache._is_sweeping_ring = True
        updated = cache.mru(None)
        self.assertFalse(updated)
    def test_update_object_size_estimation_simple(self):
        """Size updates adjust ``total_estimated_size``; unknown oids are ignored."""
        cache = self._makeOne()
        p = self._makePersist(jar=cache.jar)
        cache[p._p_oid] = p
        # The cache accesses the private attribute directly to bypass
        # the bit conversion.
        # Note that the _p_estimated_size is set *after*
        # the update call is made in ZODB's serialize
        p._Persistent__size = 0
        cache.update_object_size_estimation(p._p_oid, 2)
        # NOTE(review): 2 apparently rounds up to a 64-byte bucket — confirm
        # against the estimation granularity in picklecache.
        self.assertEqual(cache.total_estimated_size, 64)
        # A missing object does nothing
        cache.update_object_size_estimation(None, 2)
        self.assertEqual(cache.total_estimated_size, 64)
    def test_cache_size(self):
        """``cache_size`` reflects the constructor's ``target_size`` and is writable."""
        size = 42
        cache = self._makeOne(target_size=size)
        self.assertEqual(cache.cache_size, size)
        cache.cache_size = 64
        self.assertEqual(cache.cache_size, 64)
    def test_sweep_empty(self):
        """Incremental GC on an empty cache ejects nothing and returns 0."""
        cache = self._makeOne()
        self.assertEqual(cache.incrgc(), 0)
    def test_sweep_of_non_deactivating_object(self):
        """An object whose ``_p_deactivate`` doesn't ghostify it is not ejected.

        ``_SWEEPABLE_TYPES`` is temporarily narrowed to ``DummyPersistent`` so
        the sweep takes the slow path that honors the instance's own
        ``_p_deactivate``; once the override is deleted the object is swept.
        """
        cache = self._makeOne()
        p = self._makePersist(jar=cache.jar)
        p._p_state = 0 # non-ghost, get in the ring
        cache[p._p_oid] = p
        def bad_deactivate():
            "Doesn't call super, for its own reasons, so can't be ejected"
            return
        p._p_deactivate = bad_deactivate
        import persistent.picklecache
        sweep_types = persistent.picklecache._SWEEPABLE_TYPES
        persistent.picklecache._SWEEPABLE_TYPES = DummyPersistent
        try:
            # Nothing ejected: deactivation didn't actually ghostify.
            self.assertEqual(cache.full_sweep(), 0)
        finally:
            persistent.picklecache._SWEEPABLE_TYPES = sweep_types
        del p._p_deactivate
        # With the normal deactivation restored, one object is swept.
        self.assertEqual(cache.full_sweep(), 1)
    if _is_jython:
        # Jython's garbage collector is not deterministic unless global
        # monitoring is enabled; wrap the test so it runs with monitoring
        # on and asks for an explicit collect (``force_collect=True``).
        def with_deterministic_gc(f):
            def test(self):
                old_flags = gc.getMonitorGlobal()
                gc.setMonitorGlobal(True)
                try:
                    f(self, force_collect=True)
                finally:
                    # Always restore the interpreter-wide monitoring flag.
                    gc.setMonitorGlobal(old_flags)
            return test
    else:
        # Other implementations need no special handling: identity decorator.
        def with_deterministic_gc(f):
            return f
    @with_deterministic_gc
    def test_cache_garbage_collection_bytes_also_deactivates_object(
            self, force_collect=False):
        """Setting a tiny ``cache_size_bytes`` makes incrgc eject and deactivate.

        Fills the cache with 100 sized objects, then shrinks the byte target
        to 1 and verifies a GC pass both zeroes the size estimate and shrinks
        ``len(cache)`` (the latter would fail on PyPy without _SWEEP_NEEDS_GC).
        """
        from persistent.interfaces import UPTODATE
        from persistent._compat import _b
        cache = self._makeOne()
        cache.cache_size = 1000
        oids = []
        for i in range(100):
            oid = _b('oid_%04d' % i)
            oids.append(oid)
            o = cache[oid] = self._makePersist(oid=oid, state=UPTODATE)
            o._Persistent__size = 0 # must start 0, ZODB sets it AFTER updating the size
            cache.update_object_size_estimation(oid, 64)
            o._Persistent__size = 2
            # mimic what the real persistent object does to update the cache
            # size; if we don't get deactivated by sweeping, the cache size
            # won't shrink so this also validates that _p_deactivate gets
            # called when ejecting an object.
            # NOTE(review): the lambda closes over the loop variable ``oid``
            # late-bound — every deactivation decrements the *last* oid's
            # estimate; confirm this is intentional (bind with oid=oid if not).
            o._p_deactivate = lambda: cache.update_object_size_estimation(oid, -1)
        self.assertEqual(cache.cache_non_ghost_count, 100)
        # A GC at this point does nothing
        cache.incrgc()
        self.assertEqual(cache.cache_non_ghost_count, 100)
        self.assertEqual(len(cache), 100)
        # Now if we set a byte target:
        cache.cache_size_bytes = 1
        # verify the change worked as expected
        self.assertEqual(cache.cache_size_bytes, 1)
        # verify our entrance assumption is fulfilled
        self.assertTrue(cache.cache_size > 100)
        self.assertTrue(cache.total_estimated_size > 1)
        # A gc shrinks the bytes
        cache.incrgc()
        self.assertEqual(cache.total_estimated_size, 0)
        # It also shrank the measured size of the cache;
        # this would fail under PyPy if _SWEEP_NEEDS_GC was False
        if force_collect:
            gc.collect()
        self.assertEqual(len(cache), 1)
    def test_invalidate_persistent_class_calls_p_invalidate(self):
        """Invalidating a cached *class* calls its ``_p_invalidate`` hook."""
        from persistent._compat import _b
        KEY = _b('pclass')
        class pclass(object):
            _p_oid = KEY
            # Flipped by the hook so the test can observe the call.
            invalidated = False
            @classmethod
            def _p_invalidate(cls):
                cls.invalidated = True
        cache = self._makeOne()
        cache[KEY] = pclass
        cache.invalidate(KEY)
        self.assertTrue(pclass.invalidated)
    def test_ring_impl(self):
        """The selected Ring implementation matches the platform/env choice.

        PyPy (or an explicit USING_CFFI environment variable) selects the
        CFFI ring; everything else uses the pure-python deque ring.
        """
        from .. import ring
        if _is_pypy or os.getenv('USING_CFFI'):
            self.assertTrue(ring.Ring is ring._CFFIRing)
        else:
            self.assertTrue(ring.Ring is ring._DequeRing)
class
DummyPersistent
(
object
):
class
DummyPersistent
(
object
):
...
@@ -815,6 +1069,9 @@ class DummyPersistent(object):
...
@@ -815,6 +1069,9 @@ class DummyPersistent(object):
from
persistent.interfaces
import
GHOST
from
persistent.interfaces
import
GHOST
self
.
_p_state
=
GHOST
self
.
_p_state
=
GHOST
_p_deactivate
=
_p_invalidate
_p_invalidate_deactivate_helper
=
_p_invalidate
def
_p_activate
(
self
):
def
_p_activate
(
self
):
from
persistent.interfaces
import
UPTODATE
from
persistent.interfaces
import
UPTODATE
self
.
_p_state
=
UPTODATE
self
.
_p_state
=
UPTODATE
...
...
persistent/tests/test_ring.py
0 → 100644
View file @
39c1f033
##############################################################################
#
# Copyright (c) 2015 Zope Foundation and Contributors.
# All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
##############################################################################
import
unittest
from
..
import
ring
#pylint: disable=R0904,W0212,E1101
class DummyPersistent(object):
    """Minimal stand-in for a persistent object: only ``_p_oid`` is exposed.

    Instances created without an explicit oid receive a fresh integer oid
    from a class-wide counter.
    """

    _p_oid = None

    # Class-wide counter backing _next_oid (name-mangled, private).
    __next_oid = 0

    @classmethod
    def _next_oid(cls):
        """Allocate and return the next monotonically increasing oid."""
        cls.__next_oid += 1
        return cls.__next_oid

    def __init__(self, oid=None):
        # Bug fix: a caller-supplied oid used to be silently discarded,
        # leaving _p_oid as the class default (None).  Store it instead;
        # behavior for the no-argument case is unchanged.
        if oid is None:
            self._p_oid = self._next_oid()
        else:
            self._p_oid = oid

    def __repr__(self):
        return "<Dummy %r>" % self._p_oid
class _Ring_Base(object):
    """Mixin of behavioral tests shared by every Ring implementation.

    Concrete subclasses (also deriving from unittest.TestCase) supply the
    ring type via ``_getTargetClass``.
    """

    def _getTargetClass(self):
        """Return the type of the ring to test"""
        raise NotImplementedError()

    def _makeOne(self):
        # Instantiate the ring type under test with no arguments.
        return self._getTargetClass()()

    def test_empty_len(self):
        self.assertEqual(0, len(self._makeOne()))

    def test_empty_contains(self):
        r = self._makeOne()
        self.assertFalse(DummyPersistent() in r)

    def test_empty_iter(self):
        self.assertEqual([], list(self._makeOne()))

    def test_add_one_len1(self):
        r = self._makeOne()
        p = DummyPersistent()
        r.add(p)
        self.assertEqual(1, len(r))

    def test_add_one_contains(self):
        r = self._makeOne()
        p = DummyPersistent()
        r.add(p)
        self.assertTrue(p in r)

    def test_delete_one_len0(self):
        r = self._makeOne()
        p = DummyPersistent()
        r.add(p)
        r.delete(p)
        self.assertEqual(0, len(r))

    def test_delete_one_multiple(self):
        # Deleting an object that is no longer present must be a no-op.
        r = self._makeOne()
        p = DummyPersistent()
        r.add(p)
        r.delete(p)
        self.assertEqual(0, len(r))
        self.assertFalse(p in r)
        r.delete(p)
        self.assertEqual(0, len(r))
        self.assertFalse(p in r)

    def test_delete_from_wrong_ring(self):
        # Asking one ring to delete another ring's member must leave
        # both rings untouched.
        r1 = self._makeOne()
        r2 = self._makeOne()
        p1 = DummyPersistent()
        p2 = DummyPersistent()
        r1.add(p1)
        r2.add(p2)
        r2.delete(p1)
        self.assertEqual(1, len(r1))
        self.assertEqual(1, len(r2))
        self.assertEqual([p1], list(r1))
        self.assertEqual([p2], list(r2))

    def test_move_to_head(self):
        # move_to_head moves the element to the end of iteration order
        # (the MRU position); moving the current head is a no-op.
        r = self._makeOne()
        p1 = DummyPersistent()
        p2 = DummyPersistent()
        p3 = DummyPersistent()
        r.add(p1)
        r.add(p2)
        r.add(p3)
        self.assertEqual([p1, p2, p3], list(r))
        self.assertEqual(3, len(r))
        r.move_to_head(p1)
        self.assertEqual([p2, p3, p1], list(r))
        r.move_to_head(p3)
        self.assertEqual([p2, p1, p3], list(r))
        r.move_to_head(p3)
        self.assertEqual([p2, p1, p3], list(r))

    def test_delete_all(self):
        # delete_all takes (index, object) pairs, as produced by enumerate().
        r = self._makeOne()
        p1 = DummyPersistent()
        p2 = DummyPersistent()
        p3 = DummyPersistent()
        r.add(p1)
        r.add(p2)
        r.add(p3)
        self.assertEqual([p1, p2, p3], list(r))
        r.delete_all([(0, p1), (2, p3)])
        self.assertEqual([p2], list(r))
        self.assertEqual(1, len(r))
class DequeRingTests(unittest.TestCase, _Ring_Base):
    """Run the shared ring tests against the pure-python deque-based ring."""

    def _getTargetClass(self):
        return ring._DequeRing
# Collect the test-case classes to run; the CFFI variant is only added
# when its implementation is available (ring._CFFIRing is presumably
# falsey when cffi could not be loaded — confirm in persistent.ring).
_add_to_suite = [DequeRingTests]

if ring._CFFIRing:
    class CFFIRingTests(unittest.TestCase, _Ring_Base):
        def _getTargetClass(self):
            return ring._CFFIRing
    _add_to_suite.append(CFFIRingTests)
def test_suite():
    """Assemble a suite from whichever ring test classes are registered."""
    suites = [unittest.makeSuite(case) for case in _add_to_suite]
    return unittest.TestSuite(suites)
persistent/tests/test_timestamp.py
View file @
39c1f033
...
@@ -14,6 +14,11 @@
...
@@ -14,6 +14,11 @@
import
operator
import
operator
import
unittest
import
unittest
import
platform
py_impl
=
getattr
(
platform
,
'python_implementation'
,
lambda
:
None
)
_is_jython
=
py_impl
()
==
'Jython'
class
Test__UTC
(
unittest
.
TestCase
):
class
Test__UTC
(
unittest
.
TestCase
):
def
_getTargetClass
(
self
):
def
_getTargetClass
(
self
):
...
@@ -271,26 +276,37 @@ class PyAndCComparisonTests(unittest.TestCase):
...
@@ -271,26 +276,37 @@ class PyAndCComparisonTests(unittest.TestCase):
py
=
self
.
_makePy
(
*
self
.
now_ts_args
)
py
=
self
.
_makePy
(
*
self
.
now_ts_args
)
self
.
assertEqual
(
hash
(
py
),
bit_32_hash
)
self
.
assertEqual
(
hash
(
py
),
bit_32_hash
)
persistent
.
timestamp
.
c_long
=
ctypes
.
c_int64
persistent
.
timestamp
.
c_long
=
ctypes
.
c_int64
# call __hash__ directly to avoid interpreter truncation
# call __hash__ directly to avoid interpreter truncation
# in hash() on 32-bit platforms
# in hash() on 32-bit platforms
if
not
_is_jython
:
self
.
assertEqual
(
py
.
__hash__
(),
bit_64_hash
)
self
.
assertEqual
(
py
.
__hash__
(),
bit_64_hash
)
else
:
# Jython 2.7's ctypes module doesn't properly
# implement the 'value' attribute by truncating.
# (It does for native calls, but not visibly to Python).
# Therefore we get back the full python long. The actual
# hash() calls are correct, though, because the JVM uses
# 32-bit ints for its hashCode methods.
self
.
assertEqual
(
py
.
__hash__
(),
384009219096809580920179179233996861765753210540033
)
finally
:
finally
:
persistent
.
timestamp
.
c_long
=
orig_c_long
persistent
.
timestamp
.
c_long
=
orig_c_long
# These are *usually* aliases, but aren't required
# to be (and aren't under Jython 2.7).
if
orig_c_long
is
ctypes
.
c_int32
:
if
orig_c_long
is
ctypes
.
c_int32
:
self
.
assertEqual
(
py
.
__hash__
(),
bit_32_hash
)
self
.
assertEqual
(
py
.
__hash__
(),
bit_32_hash
)
elif
orig_c_long
is
ctypes
.
c_int64
:
elif
orig_c_long
is
ctypes
.
c_int64
:
self
.
assertEqual
(
py
.
__hash__
(),
bit_64_hash
)
self
.
assertEqual
(
py
.
__hash__
(),
bit_64_hash
)
else
:
self
.
fail
(
"Unknown bitness"
)
def
test_hash_equal_constants
(
self
):
def
test_hash_equal_constants
(
self
):
# The simple constants make it easier to diagnose
# The simple constants make it easier to diagnose
# a difference in algorithms
# a difference in algorithms
import
persistent.timestamp
import
persistent.timestamp
import
ctypes
import
ctypes
is_32_bit
=
persistent
.
timestamp
.
c_long
==
ctypes
.
c_int32
# We get 32-bit hash values of 32-bit platforms, or on the JVM
is_32_bit
=
persistent
.
timestamp
.
c_long
==
ctypes
.
c_int32
or
_is_jython
c
,
py
=
self
.
_make_C_and_Py
(
b'
\
x00
\
x00
\
x00
\
x00
\
x00
\
x00
\
x00
\
x00
'
)
c
,
py
=
self
.
_make_C_and_Py
(
b'
\
x00
\
x00
\
x00
\
x00
\
x00
\
x00
\
x00
\
x00
'
)
self
.
assertEqual
(
hash
(
c
),
8
)
self
.
assertEqual
(
hash
(
c
),
8
)
...
...
tox.ini
View file @
39c1f033
[tox]
[tox]
envlist
=
envlist
=
# Jython
support pending 2.7 support, due 2012-07-15 or so. See:
# Jython
2.7rc2 does work, but unfortunately has an issue running
#
http://fwierzbicki.blogspot.com/2012/03/adconion-to-fund-jython-27.html
#
with Tox 1.9.2 (http://bugs.jython.org/issue2325)
# py26,py27,py
32,jython,pypy
,coverage,docs
# py26,py27,py
27-pure,pypy,py32,py33,py34,pypy3,jython
,coverage,docs
py26,py27,py27-pure,pypy,py32,py33,py34,pypy3,coverage,docs
py26,py27,py27-pure,py
27-pure-cffi,py
py,py32,py33,py34,pypy3,coverage,docs
[testenv]
[testenv]
deps
=
deps
=
...
@@ -25,9 +25,24 @@ deps =
...
@@ -25,9 +25,24 @@ deps =
commands
=
commands
=
python
setup.py
test
-q
python
setup.py
test
-q
[testenv:py27-pure-cffi]
basepython
=
python2.7
setenv
=
PURE_PYTHON
=
1
USING_CFFI
=
1
deps
=
{
[testenv]
deps}
cffi
commands
=
python
setup.py
test
-q
[testenv:coverage]
[testenv:coverage]
basepython
=
basepython
=
python2.6
python2.6
setenv
=
USING_CFFI
=
1
commands
=
commands
=
nosetests
--with-xunit
--with-xcoverage
nosetests
--with-xunit
--with-xcoverage
deps
=
deps
=
...
@@ -35,6 +50,7 @@ deps =
...
@@ -35,6 +50,7 @@ deps =
nose
nose
coverage
coverage
nosexcover
nosexcover
cffi
[testenv:docs]
[testenv:docs]
basepython
=
basepython
=
...
...
Write
Preview
Markdown
is supported
0%
Try again
or
attach a new file
Attach a file
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment