Commit 52d1d182 authored by Chris Toshok

get weakrefs in and working for functions/instancemethods

bring in the CPython implementation of weakrefs, and after init_weakref()
overwrite some fields in the BoxedClasses for weakref.ref/proxy/callableproxy
so that they participate in our GC (we also make use of their tp_traverse
functions for scanning and their tp_clear functions as their simple_destructor).
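
Roughly, the wiring looks like the following (a condensed sketch of the
setupRuntime() changes further down, not the verbatim patch):

    // reuse the CPython slots so the weakref types behave like Pyston classes
    // (call_gc_visit just forwards each visited pointer to the GCVisitor)
    static void proxy_to_tp_traverse(GCVisitor* v, Box* b) {
        boxGCHandler(v, b);                       // scan the ordinary Box fields
        b->cls->tp_traverse(b, call_gc_visit, v); // let CPython's tp_traverse visit wr_callback etc.
    }
    static void proxy_to_tp_clear(Box* b) {
        b->cls->tp_clear(b); // CPython's tp_clear unlinks the ref from its referent
    }

    // in setupRuntime(), after init_weakref() has run, for each of
    // weakref.ref / weakref.proxy / weakref.callableproxy:
    BoxedClass* cls = &_PyWeakref_RefType;
    cls->tp_alloc = PystonType_GenericAlloc;    // allocate out of our GC heap
    cls->gc_visit = proxy_to_tp_traverse;       // used for scanning during the mark phase
    cls->simple_destructor = proxy_to_tp_clear; // used when the object is swept
    cls->is_pyston_class = true;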

as we sweep the heap:

1) any unreachable objects that have weak references are kept alive
   and placed in a std::list called weakly_referenced.

2) any unreachable weakref.ref objects are cleared and removed from
   their referent's list.

After sweeping the entire heap, we loop over the objects in
weakly_referenced.  If an object in the list still has weak references,
we loop over them, clearing their target (setting it to None) and
calling their callback if they have one.  test/tests/weakref1.py tests this.
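
In code, the post-sweep pass in runCollection() is roughly the following
(condensed from the collector change below):

    std::list<Box*, StlCompatAllocator<Box*>> weakly_referenced;
    sweepPhase(weakly_referenced); // unreachable-but-weakly-referenced objects survive into this list

    for (auto o : weakly_referenced) {
        PyWeakReference** list = (PyWeakReference**)PyObject_GET_WEAKREFS_LISTPTR(o);
        while (PyWeakReference* head = *list) {
            if (head->wr_object != Py_None) {
                _PyWeakref_ClearRef(head); // unlink the ref and set its target to None
                if (head->wr_callback) {
                    runtimeCall(head->wr_callback, ArgPassSpec(1), reinterpret_cast<Box*>(head),
                                NULL, NULL, NULL, NULL);
                    head->wr_callback = NULL;
                }
            }
        }
    }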
parent 21a98b05
@@ -54,3 +54,5 @@ compile.log
*.swo
*.out
+*~
@@ -291,8 +291,8 @@ STDLIB_OBJS := stdlib.bc.o stdlib.stripped.bc.o
STDLIB_RELEASE_OBJS := stdlib.release.bc.o
ASM_SRCS := $(wildcard src/runtime/*.S)
-STDMODULE_SRCS := errnomodule.c shamodule.c sha256module.c sha512module.c _math.c mathmodule.c md5.c md5module.c _randommodule.c _sre.c operator.c binascii.c pwdmodule.c posixmodule.c _struct.c datetimemodule.c _functoolsmodule.c _collectionsmodule.c itertoolsmodule.c resource.c signalmodule.c selectmodule.c fcntlmodule.c timemodule.c arraymodule.c zlibmodule.c _codecsmodule.c socketmodule.c unicodedata.c $(EXTRA_STDMODULE_SRCS)
+STDMODULE_SRCS := errnomodule.c shamodule.c sha256module.c sha512module.c _math.c mathmodule.c md5.c md5module.c _randommodule.c _sre.c operator.c binascii.c pwdmodule.c posixmodule.c _struct.c datetimemodule.c _functoolsmodule.c _collectionsmodule.c itertoolsmodule.c resource.c signalmodule.c selectmodule.c fcntlmodule.c timemodule.c arraymodule.c zlibmodule.c _codecsmodule.c socketmodule.c unicodedata.c _weakref.c $(EXTRA_STDMODULE_SRCS)
-STDOBJECT_SRCS := structseq.c capsule.c stringobject.c exceptions.c unicodeobject.c unicodectype.c bytearrayobject.c bytes_methods.c $(EXTRA_STDOBJECT_SRCS)
+STDOBJECT_SRCS := structseq.c capsule.c stringobject.c exceptions.c unicodeobject.c unicodectype.c bytearrayobject.c bytes_methods.c weakrefobject.c $(EXTRA_STDOBJECT_SRCS)
STDPYTHON_SRCS := pyctype.c getargs.c formatter_string.c pystrtod.c dtoa.c formatter_unicode.c $(EXTRA_STDPYTHON_SRCS)
FROM_CPYTHON_SRCS := $(addprefix from_cpython/Modules/,$(STDMODULE_SRCS)) $(addprefix from_cpython/Objects/,$(STDOBJECT_SRCS)) $(addprefix from_cpython/Python/,$(STDPYTHON_SRCS))
...
@@ -15,10 +15,10 @@ endforeach(STDLIB_FILE)
add_custom_target(copy_stdlib ALL DEPENDS ${STDLIB_TARGETS})
# compile specified files in from_cpython/Modules
-file(GLOB_RECURSE STDMODULE_SRCS Modules errnomodule.c shamodule.c sha256module.c sha512module.c _math.c mathmodule.c md5.c md5module.c _randommodule.c _sre.c operator.c binascii.c pwdmodule.c posixmodule.c _struct.c datetimemodule.c _functoolsmodule.c _collectionsmodule.c itertoolsmodule.c resource.c signalmodule.c selectmodule.c fcntlmodule.c timemodule.c arraymodule.c zlibmodule.c _codecsmodule.c socketmodule.c unicodedata.c)
+file(GLOB_RECURSE STDMODULE_SRCS Modules errnomodule.c shamodule.c sha256module.c sha512module.c _math.c mathmodule.c md5.c md5module.c _randommodule.c _sre.c operator.c binascii.c pwdmodule.c posixmodule.c _struct.c datetimemodule.c _functoolsmodule.c _collectionsmodule.c itertoolsmodule.c resource.c signalmodule.c selectmodule.c fcntlmodule.c timemodule.c arraymodule.c zlibmodule.c _codecsmodule.c socketmodule.c unicodedata.c _weakref.c)
# compile specified files in from_cpython/Objects
-file(GLOB_RECURSE STDOBJECT_SRCS Objects structseq.c capsule.c stringobject.c exceptions.c unicodeobject.c unicodectype.c bytearrayobject.c bytes_methods.c)
+file(GLOB_RECURSE STDOBJECT_SRCS Objects structseq.c capsule.c stringobject.c exceptions.c unicodeobject.c unicodectype.c bytearrayobject.c bytes_methods.c weakrefobject.c)
# compile specified files in from_cpython/Python
file(GLOB_RECURSE STDPYTHON_SRCS Python getargs.c pyctype.c formatter_string.c pystrtod.c dtoa.c formatter_unicode.c)
...
@@ -73,6 +73,7 @@
#include "iterobject.h"
#include "descrobject.h"
#include "warnings.h"
+#include "weakrefobject.h"
#include "codecs.h"
#include "pyerrors.h"
...
/* Weak references objects for Python. */
#ifndef Py_WEAKREFOBJECT_H
#define Py_WEAKREFOBJECT_H
#ifdef __cplusplus
extern "C" {
#endif
typedef struct _PyWeakReference PyWeakReference;
/* PyWeakReference is the base struct for the Python ReferenceType, ProxyType,
* and CallableProxyType.
*/
struct _PyWeakReference {
PyObject_HEAD
/* The object to which this is a weak reference, or Py_None if none.
* Note that this is a stealth reference: wr_object's refcount is
* not incremented to reflect this pointer.
*/
PyObject *wr_object;
/* A callable to invoke when wr_object dies, or NULL if none. */
PyObject *wr_callback;
/* A cache for wr_object's hash code. As usual for hashes, this is -1
* if the hash code isn't known yet.
*/
long hash;
/* If wr_object is weakly referenced, wr_object has a doubly-linked NULL-
* terminated list of weak references to it. These are the list pointers.
* If wr_object goes away, wr_object is set to Py_None, and these pointers
* have no meaning then.
*/
PyWeakReference *wr_prev;
PyWeakReference *wr_next;
};
PyAPI_DATA(PyTypeObject) _PyWeakref_RefType;
PyAPI_DATA(PyTypeObject) _PyWeakref_ProxyType;
PyAPI_DATA(PyTypeObject) _PyWeakref_CallableProxyType;
#define PyWeakref_CheckRef(op) PyObject_TypeCheck(op, &_PyWeakref_RefType)
#define PyWeakref_CheckRefExact(op) \
(Py_TYPE(op) == &_PyWeakref_RefType)
#define PyWeakref_CheckProxy(op) \
((Py_TYPE(op) == &_PyWeakref_ProxyType) || \
(Py_TYPE(op) == &_PyWeakref_CallableProxyType))
#define PyWeakref_Check(op) \
(PyWeakref_CheckRef(op) || PyWeakref_CheckProxy(op))
PyAPI_FUNC(PyObject *) PyWeakref_NewRef(PyObject *ob,
PyObject *callback) PYSTON_NOEXCEPT;
PyAPI_FUNC(PyObject *) PyWeakref_NewProxy(PyObject *ob,
PyObject *callback) PYSTON_NOEXCEPT;
PyAPI_FUNC(PyObject *) PyWeakref_GetObject(PyObject *ref) PYSTON_NOEXCEPT;
PyAPI_FUNC(Py_ssize_t) _PyWeakref_GetWeakrefCount(PyWeakReference *head) PYSTON_NOEXCEPT;
PyAPI_FUNC(void) _PyWeakref_ClearRef(PyWeakReference *self) PYSTON_NOEXCEPT;
/* Explanation for the Py_REFCNT() check: when a weakref's target is part
of a long chain of deallocations which triggers the trashcan mechanism,
clearing the weakrefs can be delayed long after the target's refcount
has dropped to zero. In the meantime, code accessing the weakref will
be able to "see" the target object even though it is supposed to be
unreachable. See issue #16602. */
/*
#define PyWeakref_GET_OBJECT(ref) \
(Py_REFCNT(((PyWeakReference *)(ref))->wr_object) > 0 \
? ((PyWeakReference *)(ref))->wr_object \
: Py_None)
*/
/* pyston version below. we'll need to worry about ensuring we keep
the same semantics of weakrefs being cleared before destructors
fire */
#define PyWeakref_GET_OBJECT(ref) ((PyWeakReference *)(ref))->wr_object
#ifdef __cplusplus
}
#endif
#endif /* !Py_WEAKREFOBJECT_H */
@@ -96,6 +96,14 @@ init_weakref(void)
m = Py_InitModule3("_weakref", weakref_functions,
"Weak-reference support module.");
if (m != NULL) {
+// Pyston change: call PyType_Ready on our types
+if (PyType_Ready(&_PyWeakref_RefType) < 0)
+return;
+if (PyType_Ready(&_PyWeakref_ProxyType) < 0)
+return;
+if (PyType_Ready(&_PyWeakref_CallableProxyType) < 0)
+return;
Py_INCREF(&_PyWeakref_RefType);
PyModule_AddObject(m, "ref",
(PyObject *) &_PyWeakref_RefType);
...
// This file is originally from CPython 2.7, with modifications for Pyston
#include "Python.h"
#include "structmember.h"
#define GET_WEAKREFS_LISTPTR(o) \
((PyWeakReference **) PyObject_GET_WEAKREFS_LISTPTR(o))
Py_ssize_t
_PyWeakref_GetWeakrefCount(PyWeakReference *head)
{
Py_ssize_t count = 0;
while (head != NULL) {
++count;
head = head->wr_next;
}
return count;
}
static void
init_weakref(PyWeakReference *self, PyObject *ob, PyObject *callback)
{
self->hash = -1;
self->wr_object = ob;
Py_XINCREF(callback);
self->wr_callback = callback;
}
static PyWeakReference *
new_weakref(PyObject *ob, PyObject *callback)
{
PyWeakReference *result;
result = PyObject_GC_New(PyWeakReference, &_PyWeakref_RefType);
if (result) {
init_weakref(result, ob, callback);
PyObject_GC_Track(result);
}
return result;
}
/* This function clears the passed-in reference and removes it from the
* list of weak references for the referent. This is the only code that
* removes an item from the doubly-linked list of weak references for an
* object; it is also responsible for clearing the callback slot.
*/
static void
clear_weakref(PyWeakReference *self)
{
PyObject *callback = self->wr_callback;
if (self->wr_object != Py_None) {
PyWeakReference **list = GET_WEAKREFS_LISTPTR(self->wr_object);
if (*list == self)
/* If 'self' is the end of the list (and thus self->wr_next == NULL)
then the weakref list itself (and thus the value of *list) will
end up being set to NULL. */
*list = self->wr_next;
self->wr_object = Py_None;
if (self->wr_prev != NULL)
self->wr_prev->wr_next = self->wr_next;
if (self->wr_next != NULL)
self->wr_next->wr_prev = self->wr_prev;
self->wr_prev = NULL;
self->wr_next = NULL;
}
if (callback != NULL) {
Py_DECREF(callback);
self->wr_callback = NULL;
}
}
/* Cyclic gc uses this to *just* clear the passed-in reference, leaving
* the callback intact and uncalled. It must be possible to call self's
* tp_dealloc() after calling this, so self has to be left in a sane enough
* state for that to work. We expect tp_dealloc to decref the callback
* then. The reason for not letting clear_weakref() decref the callback
* right now is that if the callback goes away, that may in turn trigger
* another callback (if a weak reference to the callback exists) -- running
* arbitrary Python code in the middle of gc is a disaster. The convolution
* here allows gc to delay triggering such callbacks until the world is in
* a sane state again.
*/
void
_PyWeakref_ClearRef(PyWeakReference *self)
{
PyObject *callback;
assert(self != NULL);
assert(PyWeakref_Check(self));
/* Preserve and restore the callback around clear_weakref. */
callback = self->wr_callback;
self->wr_callback = NULL;
clear_weakref(self);
self->wr_callback = callback;
}
static void
weakref_dealloc(PyObject *self)
{
PyObject_GC_UnTrack(self);
clear_weakref((PyWeakReference *) self);
Py_TYPE(self)->tp_free(self);
}
static int
gc_traverse(PyWeakReference *self, visitproc visit, void *arg)
{
Py_VISIT(self->wr_callback);
return 0;
}
static int
gc_clear(PyWeakReference *self)
{
clear_weakref(self);
return 0;
}
static PyObject *
weakref_call(PyWeakReference *self, PyObject *args, PyObject *kw)
{
static char *kwlist[] = {NULL};
if (PyArg_ParseTupleAndKeywords(args, kw, ":__call__", kwlist)) {
PyObject *object = PyWeakref_GET_OBJECT(self);
Py_INCREF(object);
return (object);
}
return NULL;
}
static long
weakref_hash(PyWeakReference *self)
{
if (self->hash != -1)
return self->hash;
if (PyWeakref_GET_OBJECT(self) == Py_None) {
PyErr_SetString(PyExc_TypeError, "weak object has gone away");
return -1;
}
self->hash = PyObject_Hash(PyWeakref_GET_OBJECT(self));
return self->hash;
}
static PyObject *
weakref_repr(PyWeakReference *self)
{
char buffer[256];
if (PyWeakref_GET_OBJECT(self) == Py_None) {
PyOS_snprintf(buffer, sizeof(buffer), "<weakref at %p; dead>", self);
}
else {
char *name = NULL;
PyObject *nameobj = PyObject_GetAttrString(PyWeakref_GET_OBJECT(self),
"__name__");
if (nameobj == NULL)
PyErr_Clear();
else if (PyString_Check(nameobj))
name = PyString_AS_STRING(nameobj);
if (name != NULL) {
PyOS_snprintf(buffer, sizeof(buffer),
"<weakref at %p; to '%.50s' at %p (%s)>",
self,
Py_TYPE(PyWeakref_GET_OBJECT(self))->tp_name,
PyWeakref_GET_OBJECT(self),
name);
}
else {
PyOS_snprintf(buffer, sizeof(buffer),
"<weakref at %p; to '%.50s' at %p>",
self,
Py_TYPE(PyWeakref_GET_OBJECT(self))->tp_name,
PyWeakref_GET_OBJECT(self));
}
Py_XDECREF(nameobj);
}
return PyString_FromString(buffer);
}
/* Weak references only support equality, not ordering. Two weak references
are equal if the underlying objects are equal. If the underlying object has
gone away, they are equal if they are identical. */
static PyObject *
weakref_richcompare(PyWeakReference* self, PyWeakReference* other, int op)
{
if ((op != Py_EQ && op != Py_NE) || self->ob_type != other->ob_type) {
Py_INCREF(Py_NotImplemented);
return Py_NotImplemented;
}
if (PyWeakref_GET_OBJECT(self) == Py_None
|| PyWeakref_GET_OBJECT(other) == Py_None) {
int res = (self == other);
if (op == Py_NE)
res = !res;
if (res)
Py_RETURN_TRUE;
else
Py_RETURN_FALSE;
}
return PyObject_RichCompare(PyWeakref_GET_OBJECT(self),
PyWeakref_GET_OBJECT(other), op);
}
/* Given the head of an object's list of weak references, extract the
* two callback-less refs (ref and proxy). Used to determine if the
* shared references exist and to determine the back link for newly
* inserted references.
*/
static void
get_basic_refs(PyWeakReference *head,
PyWeakReference **refp, PyWeakReference **proxyp)
{
*refp = NULL;
*proxyp = NULL;
if (head != NULL && head->wr_callback == NULL) {
/* We need to be careful that the "basic refs" aren't
subclasses of the main types. That complicates this a
little. */
if (PyWeakref_CheckRefExact(head)) {
*refp = head;
head = head->wr_next;
}
if (head != NULL
&& head->wr_callback == NULL
&& PyWeakref_CheckProxy(head)) {
*proxyp = head;
/* head = head->wr_next; */
}
}
}
/* Insert 'newref' in the list after 'prev'. Both must be non-NULL. */
static void
insert_after(PyWeakReference *newref, PyWeakReference *prev)
{
newref->wr_prev = prev;
newref->wr_next = prev->wr_next;
if (prev->wr_next != NULL)
prev->wr_next->wr_prev = newref;
prev->wr_next = newref;
}
/* Insert 'newref' at the head of the list; 'list' points to the variable
* that stores the head.
*/
static void
insert_head(PyWeakReference *newref, PyWeakReference **list)
{
PyWeakReference *next = *list;
newref->wr_prev = NULL;
newref->wr_next = next;
if (next != NULL)
next->wr_prev = newref;
*list = newref;
}
static int
parse_weakref_init_args(char *funcname, PyObject *args, PyObject *kwargs,
PyObject **obp, PyObject **callbackp)
{
/* XXX Should check that kwargs == NULL or is empty. */
return PyArg_UnpackTuple(args, funcname, 1, 2, obp, callbackp);
}
static PyObject *
weakref___new__(PyTypeObject *type, PyObject *args, PyObject *kwargs)
{
PyWeakReference *self = NULL;
PyObject *ob, *callback = NULL;
if (parse_weakref_init_args("__new__", args, kwargs, &ob, &callback)) {
PyWeakReference *ref, *proxy;
PyWeakReference **list;
if (!PyType_SUPPORTS_WEAKREFS(Py_TYPE(ob))) {
PyErr_Format(PyExc_TypeError,
"cannot create weak reference to '%s' object",
Py_TYPE(ob)->tp_name);
return NULL;
}
if (callback == Py_None)
callback = NULL;
list = GET_WEAKREFS_LISTPTR(ob);
get_basic_refs(*list, &ref, &proxy);
if (callback == NULL && type == &_PyWeakref_RefType) {
if (ref != NULL) {
/* We can re-use an existing reference. */
Py_INCREF(ref);
return (PyObject *)ref;
}
}
/* We have to create a new reference. */
/* Note: the tp_alloc() can trigger cyclic GC, so the weakref
list on ob can be mutated. This means that the ref and
proxy pointers we got back earlier may have been collected,
so we need to compute these values again before we use
them. */
self = (PyWeakReference *) (type->tp_alloc(type, 0));
if (self != NULL) {
init_weakref(self, ob, callback);
if (callback == NULL && type == &_PyWeakref_RefType) {
insert_head(self, list);
}
else {
PyWeakReference *prev;
get_basic_refs(*list, &ref, &proxy);
prev = (proxy == NULL) ? ref : proxy;
if (prev == NULL)
insert_head(self, list);
else
insert_after(self, prev);
}
}
}
return (PyObject *)self;
}
static int
weakref___init__(PyObject *self, PyObject *args, PyObject *kwargs)
{
PyObject *tmp;
if (parse_weakref_init_args("__init__", args, kwargs, &tmp, &tmp))
return 0;
else
return -1;
}
PyTypeObject
_PyWeakref_RefType = {
// Pyston change:
//PyVarObject_HEAD_INIT(&PyType_Type, 0)
PyVarObject_HEAD_INIT(NULL, 0)
"weakref",
sizeof(PyWeakReference),
0,
weakref_dealloc, /*tp_dealloc*/
0, /*tp_print*/
0, /*tp_getattr*/
0, /*tp_setattr*/
0, /*tp_compare*/
(reprfunc)weakref_repr, /*tp_repr*/
0, /*tp_as_number*/
0, /*tp_as_sequence*/
0, /*tp_as_mapping*/
(hashfunc)weakref_hash, /*tp_hash*/
(ternaryfunc)weakref_call, /*tp_call*/
0, /*tp_str*/
0, /*tp_getattro*/
0, /*tp_setattro*/
0, /*tp_as_buffer*/
Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC | Py_TPFLAGS_HAVE_RICHCOMPARE
| Py_TPFLAGS_BASETYPE, /*tp_flags*/
0, /*tp_doc*/
(traverseproc)gc_traverse, /*tp_traverse*/
(inquiry)gc_clear, /*tp_clear*/
(richcmpfunc)weakref_richcompare, /*tp_richcompare*/
0, /*tp_weaklistoffset*/
0, /*tp_iter*/
0, /*tp_iternext*/
0, /*tp_methods*/
0, /*tp_members*/
0, /*tp_getset*/
0, /*tp_base*/
0, /*tp_dict*/
0, /*tp_descr_get*/
0, /*tp_descr_set*/
0, /*tp_dictoffset*/
weakref___init__, /*tp_init*/
PyType_GenericAlloc, /*tp_alloc*/
weakref___new__, /*tp_new*/
PyObject_GC_Del, /*tp_free*/
};
static int
proxy_checkref(PyWeakReference *proxy)
{
if (PyWeakref_GET_OBJECT(proxy) == Py_None) {
PyErr_SetString(PyExc_ReferenceError,
"weakly-referenced object no longer exists");
return 0;
}
return 1;
}
/* If a parameter is a proxy, check that it is still "live" and wrap it,
* replacing the original value with the raw object. Raises ReferenceError
* if the param is a dead proxy.
*/
#define UNWRAP(o) \
if (PyWeakref_CheckProxy(o)) { \
if (!proxy_checkref((PyWeakReference *)o)) \
return NULL; \
o = PyWeakref_GET_OBJECT(o); \
}
#define UNWRAP_I(o) \
if (PyWeakref_CheckProxy(o)) { \
if (!proxy_checkref((PyWeakReference *)o)) \
return -1; \
o = PyWeakref_GET_OBJECT(o); \
}
#define WRAP_UNARY(method, generic) \
static PyObject * \
method(PyObject *proxy) { \
UNWRAP(proxy); \
return generic(proxy); \
}
#define WRAP_BINARY(method, generic) \
static PyObject * \
method(PyObject *x, PyObject *y) { \
UNWRAP(x); \
UNWRAP(y); \
return generic(x, y); \
}
/* Note that the third arg needs to be checked for NULL since the tp_call
* slot can receive NULL for this arg.
*/
#define WRAP_TERNARY(method, generic) \
static PyObject * \
method(PyObject *proxy, PyObject *v, PyObject *w) { \
UNWRAP(proxy); \
UNWRAP(v); \
if (w != NULL) \
UNWRAP(w); \
return generic(proxy, v, w); \
}
#define WRAP_METHOD(method, special) \
static PyObject * \
method(PyObject *proxy) { \
UNWRAP(proxy); \
return PyObject_CallMethod(proxy, special, ""); \
}
/* direct slots */
WRAP_BINARY(proxy_getattr, PyObject_GetAttr)
WRAP_UNARY(proxy_str, PyObject_Str)
WRAP_TERNARY(proxy_call, PyEval_CallObjectWithKeywords)
static PyObject *
proxy_repr(PyWeakReference *proxy)
{
char buf[160];
PyOS_snprintf(buf, sizeof(buf),
"<weakproxy at %p to %.100s at %p>", proxy,
Py_TYPE(PyWeakref_GET_OBJECT(proxy))->tp_name,
PyWeakref_GET_OBJECT(proxy));
return PyString_FromString(buf);
}
static int
proxy_setattr(PyWeakReference *proxy, PyObject *name, PyObject *value)
{
if (!proxy_checkref(proxy))
return -1;
return PyObject_SetAttr(PyWeakref_GET_OBJECT(proxy), name, value);
}
static int
proxy_compare(PyObject *proxy, PyObject *v)
{
UNWRAP_I(proxy);
UNWRAP_I(v);
return PyObject_Compare(proxy, v);
}
/* number slots */
WRAP_BINARY(proxy_add, PyNumber_Add)
WRAP_BINARY(proxy_sub, PyNumber_Subtract)
WRAP_BINARY(proxy_mul, PyNumber_Multiply)
WRAP_BINARY(proxy_div, PyNumber_Divide)
WRAP_BINARY(proxy_floor_div, PyNumber_FloorDivide)
WRAP_BINARY(proxy_true_div, PyNumber_TrueDivide)
WRAP_BINARY(proxy_mod, PyNumber_Remainder)
WRAP_BINARY(proxy_divmod, PyNumber_Divmod)
WRAP_TERNARY(proxy_pow, PyNumber_Power)
WRAP_UNARY(proxy_neg, PyNumber_Negative)
WRAP_UNARY(proxy_pos, PyNumber_Positive)
WRAP_UNARY(proxy_abs, PyNumber_Absolute)
WRAP_UNARY(proxy_invert, PyNumber_Invert)
WRAP_BINARY(proxy_lshift, PyNumber_Lshift)
WRAP_BINARY(proxy_rshift, PyNumber_Rshift)
WRAP_BINARY(proxy_and, PyNumber_And)
WRAP_BINARY(proxy_xor, PyNumber_Xor)
WRAP_BINARY(proxy_or, PyNumber_Or)
WRAP_UNARY(proxy_int, PyNumber_Int)
WRAP_UNARY(proxy_long, PyNumber_Long)
WRAP_UNARY(proxy_float, PyNumber_Float)
WRAP_BINARY(proxy_iadd, PyNumber_InPlaceAdd)
WRAP_BINARY(proxy_isub, PyNumber_InPlaceSubtract)
WRAP_BINARY(proxy_imul, PyNumber_InPlaceMultiply)
WRAP_BINARY(proxy_idiv, PyNumber_InPlaceDivide)
WRAP_BINARY(proxy_ifloor_div, PyNumber_InPlaceFloorDivide)
WRAP_BINARY(proxy_itrue_div, PyNumber_InPlaceTrueDivide)
WRAP_BINARY(proxy_imod, PyNumber_InPlaceRemainder)
WRAP_TERNARY(proxy_ipow, PyNumber_InPlacePower)
WRAP_BINARY(proxy_ilshift, PyNumber_InPlaceLshift)
WRAP_BINARY(proxy_irshift, PyNumber_InPlaceRshift)
WRAP_BINARY(proxy_iand, PyNumber_InPlaceAnd)
WRAP_BINARY(proxy_ixor, PyNumber_InPlaceXor)
WRAP_BINARY(proxy_ior, PyNumber_InPlaceOr)
WRAP_UNARY(proxy_index, PyNumber_Index)
static int
proxy_nonzero(PyWeakReference *proxy)
{
PyObject *o = PyWeakref_GET_OBJECT(proxy);
if (!proxy_checkref(proxy))
return -1;
return PyObject_IsTrue(o);
}
static void
proxy_dealloc(PyWeakReference *self)
{
if (self->wr_callback != NULL)
PyObject_GC_UnTrack((PyObject *)self);
clear_weakref(self);
PyObject_GC_Del(self);
}
/* sequence slots */
static PyObject *
proxy_slice(PyWeakReference *proxy, Py_ssize_t i, Py_ssize_t j)
{
if (!proxy_checkref(proxy))
return NULL;
return PySequence_GetSlice(PyWeakref_GET_OBJECT(proxy), i, j);
}
static int
proxy_ass_slice(PyWeakReference *proxy, Py_ssize_t i, Py_ssize_t j, PyObject *value)
{
if (!proxy_checkref(proxy))
return -1;
return PySequence_SetSlice(PyWeakref_GET_OBJECT(proxy), i, j, value);
}
static int
proxy_contains(PyWeakReference *proxy, PyObject *value)
{
if (!proxy_checkref(proxy))
return -1;
return PySequence_Contains(PyWeakref_GET_OBJECT(proxy), value);
}
/* mapping slots */
static Py_ssize_t
proxy_length(PyWeakReference *proxy)
{
if (!proxy_checkref(proxy))
return -1;
return PyObject_Length(PyWeakref_GET_OBJECT(proxy));
}
WRAP_BINARY(proxy_getitem, PyObject_GetItem)
static int
proxy_setitem(PyWeakReference *proxy, PyObject *key, PyObject *value)
{
if (!proxy_checkref(proxy))
return -1;
if (value == NULL)
return PyObject_DelItem(PyWeakref_GET_OBJECT(proxy), key);
else
return PyObject_SetItem(PyWeakref_GET_OBJECT(proxy), key, value);
}
/* iterator slots */
static PyObject *
proxy_iter(PyWeakReference *proxy)
{
if (!proxy_checkref(proxy))
return NULL;
return PyObject_GetIter(PyWeakref_GET_OBJECT(proxy));
}
static PyObject *
proxy_iternext(PyWeakReference *proxy)
{
if (!proxy_checkref(proxy))
return NULL;
return PyIter_Next(PyWeakref_GET_OBJECT(proxy));
}
WRAP_METHOD(proxy_unicode, "__unicode__");
static PyMethodDef proxy_methods[] = {
{"__unicode__", (PyCFunction)proxy_unicode, METH_NOARGS},
{NULL, NULL}
};
static PyNumberMethods proxy_as_number = {
proxy_add, /*nb_add*/
proxy_sub, /*nb_subtract*/
proxy_mul, /*nb_multiply*/
proxy_div, /*nb_divide*/
proxy_mod, /*nb_remainder*/
proxy_divmod, /*nb_divmod*/
proxy_pow, /*nb_power*/
proxy_neg, /*nb_negative*/
proxy_pos, /*nb_positive*/
proxy_abs, /*nb_absolute*/
(inquiry)proxy_nonzero, /*nb_nonzero*/
proxy_invert, /*nb_invert*/
proxy_lshift, /*nb_lshift*/
proxy_rshift, /*nb_rshift*/
proxy_and, /*nb_and*/
proxy_xor, /*nb_xor*/
proxy_or, /*nb_or*/
0, /*nb_coerce*/
proxy_int, /*nb_int*/
proxy_long, /*nb_long*/
proxy_float, /*nb_float*/
0, /*nb_oct*/
0, /*nb_hex*/
proxy_iadd, /*nb_inplace_add*/
proxy_isub, /*nb_inplace_subtract*/
proxy_imul, /*nb_inplace_multiply*/
proxy_idiv, /*nb_inplace_divide*/
proxy_imod, /*nb_inplace_remainder*/
proxy_ipow, /*nb_inplace_power*/
proxy_ilshift, /*nb_inplace_lshift*/
proxy_irshift, /*nb_inplace_rshift*/
proxy_iand, /*nb_inplace_and*/
proxy_ixor, /*nb_inplace_xor*/
proxy_ior, /*nb_inplace_or*/
proxy_floor_div, /*nb_floor_divide*/
proxy_true_div, /*nb_true_divide*/
proxy_ifloor_div, /*nb_inplace_floor_divide*/
proxy_itrue_div, /*nb_inplace_true_divide*/
proxy_index, /*nb_index*/
};
static PySequenceMethods proxy_as_sequence = {
(lenfunc)proxy_length, /*sq_length*/
0, /*sq_concat*/
0, /*sq_repeat*/
0, /*sq_item*/
(ssizessizeargfunc)proxy_slice, /*sq_slice*/
0, /*sq_ass_item*/
(ssizessizeobjargproc)proxy_ass_slice, /*sq_ass_slice*/
(objobjproc)proxy_contains, /* sq_contains */
};
static PyMappingMethods proxy_as_mapping = {
(lenfunc)proxy_length, /*mp_length*/
proxy_getitem, /*mp_subscript*/
(objobjargproc)proxy_setitem, /*mp_ass_subscript*/
};
PyTypeObject
_PyWeakref_ProxyType = {
// Pyston change:
//PyVarObject_HEAD_INIT(&PyType_Type, 0)
PyVarObject_HEAD_INIT(NULL, 0)
"weakproxy",
sizeof(PyWeakReference),
0,
/* methods */
(destructor)proxy_dealloc, /* tp_dealloc */
0, /* tp_print */
0, /* tp_getattr */
0, /* tp_setattr */
// Pyston change:
0, //proxy_compare, /* tp_compare */
(reprfunc)proxy_repr, /* tp_repr */
&proxy_as_number, /* tp_as_number */
&proxy_as_sequence, /* tp_as_sequence */
&proxy_as_mapping, /* tp_as_mapping */
0, /* tp_hash */
0, /* tp_call */
proxy_str, /* tp_str */
// Pyston change:
0, //proxy_getattr, /* tp_getattro */
0, //(setattrofunc)proxy_setattr, /* tp_setattro */
0, /* tp_as_buffer */
Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC
| Py_TPFLAGS_CHECKTYPES, /* tp_flags */
0, /* tp_doc */
(traverseproc)gc_traverse, /* tp_traverse */
(inquiry)gc_clear, /* tp_clear */
0, /* tp_richcompare */
0, /* tp_weaklistoffset */
(getiterfunc)proxy_iter, /* tp_iter */
(iternextfunc)proxy_iternext, /* tp_iternext */
proxy_methods, /* tp_methods */
};
PyTypeObject
_PyWeakref_CallableProxyType = {
// Pyston change:
//PyVarObject_HEAD_INIT(&PyType_Type, 0)
PyVarObject_HEAD_INIT(NULL, 0)
"weakcallableproxy",
sizeof(PyWeakReference),
0,
/* methods */
(destructor)proxy_dealloc, /* tp_dealloc */
0, /* tp_print */
0, /* tp_getattr */
0, /* tp_setattr */
0, //proxy_compare, /* tp_compare */
(unaryfunc)proxy_repr, /* tp_repr */
&proxy_as_number, /* tp_as_number */
&proxy_as_sequence, /* tp_as_sequence */
&proxy_as_mapping, /* tp_as_mapping */
0, /* tp_hash */
proxy_call, /* tp_call */
proxy_str, /* tp_str */
// Pyston change:
0, //proxy_getattr, /* tp_getattro */
0, //(setattrofunc)proxy_setattr, /* tp_setattro */
0, /* tp_as_buffer */
Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC
| Py_TPFLAGS_CHECKTYPES, /* tp_flags */
0, /* tp_doc */
(traverseproc)gc_traverse, /* tp_traverse */
(inquiry)gc_clear, /* tp_clear */
0, /* tp_richcompare */
0, /* tp_weaklistoffset */
(getiterfunc)proxy_iter, /* tp_iter */
(iternextfunc)proxy_iternext, /* tp_iternext */
};
PyObject *
PyWeakref_NewRef(PyObject *ob, PyObject *callback)
{
PyWeakReference *result = NULL;
PyWeakReference **list;
PyWeakReference *ref, *proxy;
if (!PyType_SUPPORTS_WEAKREFS(Py_TYPE(ob))) {
PyErr_Format(PyExc_TypeError,
"cannot create weak reference to '%s' object",
Py_TYPE(ob)->tp_name);
return NULL;
}
list = GET_WEAKREFS_LISTPTR(ob);
get_basic_refs(*list, &ref, &proxy);
if (callback == Py_None)
callback = NULL;
if (callback == NULL)
/* return existing weak reference if it exists */
result = ref;
if (result != NULL)
Py_INCREF(result);
else {
/* Note: new_weakref() can trigger cyclic GC, so the weakref
list on ob can be mutated. This means that the ref and
proxy pointers we got back earlier may have been collected,
so we need to compute these values again before we use
them. */
result = new_weakref(ob, callback);
if (result != NULL) {
get_basic_refs(*list, &ref, &proxy);
if (callback == NULL) {
if (ref == NULL)
insert_head(result, list);
else {
/* Someone else added a ref without a callback
during GC. Return that one instead of this one
to avoid violating the invariants of the list
of weakrefs for ob. */
Py_DECREF(result);
Py_INCREF(ref);
result = ref;
}
}
else {
PyWeakReference *prev;
prev = (proxy == NULL) ? ref : proxy;
if (prev == NULL)
insert_head(result, list);
else
insert_after(result, prev);
}
}
}
return (PyObject *) result;
}
PyObject *
PyWeakref_NewProxy(PyObject *ob, PyObject *callback)
{
PyWeakReference *result = NULL;
PyWeakReference **list;
PyWeakReference *ref, *proxy;
if (!PyType_SUPPORTS_WEAKREFS(Py_TYPE(ob))) {
PyErr_Format(PyExc_TypeError,
"cannot create weak reference to '%s' object",
Py_TYPE(ob)->tp_name);
return NULL;
}
list = GET_WEAKREFS_LISTPTR(ob);
get_basic_refs(*list, &ref, &proxy);
if (callback == Py_None)
callback = NULL;
if (callback == NULL)
/* attempt to return an existing weak reference if it exists */
result = proxy;
if (result != NULL)
Py_INCREF(result);
else {
/* Note: new_weakref() can trigger cyclic GC, so the weakref
list on ob can be mutated. This means that the ref and
proxy pointers we got back earlier may have been collected,
so we need to compute these values again before we use
them. */
result = new_weakref(ob, callback);
if (result != NULL) {
PyWeakReference *prev;
if (PyCallable_Check(ob))
Py_TYPE(result) = &_PyWeakref_CallableProxyType;
else
Py_TYPE(result) = &_PyWeakref_ProxyType;
get_basic_refs(*list, &ref, &proxy);
if (callback == NULL) {
if (proxy != NULL) {
/* Someone else added a proxy without a callback
during GC. Return that one instead of this one
to avoid violating the invariants of the list
of weakrefs for ob. */
Py_DECREF(result);
Py_INCREF(result = proxy);
goto skip_insert;
}
prev = ref;
}
else
prev = (proxy == NULL) ? ref : proxy;
if (prev == NULL)
insert_head(result, list);
else
insert_after(result, prev);
skip_insert:
;
}
}
return (PyObject *) result;
}
PyObject *
PyWeakref_GetObject(PyObject *ref)
{
if (ref == NULL || !PyWeakref_Check(ref)) {
PyErr_BadInternalCall();
return NULL;
}
return PyWeakref_GET_OBJECT(ref);
}
/* Note that there's an inlined copy-paste of handle_callback() in gcmodule.c's
* handle_weakrefs().
*/
static void
handle_callback(PyWeakReference *ref, PyObject *callback)
{
PyObject *cbresult = PyObject_CallFunctionObjArgs(callback, ref, NULL);
if (cbresult == NULL)
PyErr_WriteUnraisable(callback);
else
Py_DECREF(cbresult);
}
/* This function is called by the tp_dealloc handler to clear weak references.
*
* This iterates through the weak references for 'object' and calls callbacks
* for those references which have one. It returns when all callbacks have
* been attempted.
*/
void
PyObject_ClearWeakRefs(PyObject *object)
{
PyWeakReference **list;
if (object == NULL
|| !PyType_SUPPORTS_WEAKREFS(Py_TYPE(object))
//|| object->ob_refcnt != 0
) {
PyErr_BadInternalCall();
return;
}
list = GET_WEAKREFS_LISTPTR(object);
/* Remove the callback-less basic and proxy references */
if (*list != NULL && (*list)->wr_callback == NULL) {
clear_weakref(*list);
if (*list != NULL && (*list)->wr_callback == NULL)
clear_weakref(*list);
}
if (*list != NULL) {
PyWeakReference *current = *list;
Py_ssize_t count = _PyWeakref_GetWeakrefCount(current);
int restore_error = PyErr_Occurred() ? 1 : 0;
PyObject *err_type, *err_value, *err_tb;
if (restore_error)
PyErr_Fetch(&err_type, &err_value, &err_tb);
if (count == 1) {
PyObject *callback = current->wr_callback;
current->wr_callback = NULL;
clear_weakref(current);
if (callback != NULL) {
// Pyston change:
// current is a stack reference to a GC allocated object. If it wasn't null when we fetched it from *list, it won't
// be collected, and we can trust that it's still valid here.
if (true /*current->ob_refcnt > 0*/)
handle_callback(current, callback);
Py_DECREF(callback);
}
}
else {
PyObject *tuple;
Py_ssize_t i = 0;
tuple = PyTuple_New(count * 2);
if (tuple == NULL) {
if (restore_error)
PyErr_Fetch(&err_type, &err_value, &err_tb);
return;
}
for (i = 0; i < count; ++i) {
PyWeakReference *next = current->wr_next;
// Pyston change:
// current is a stack reference to a GC allocated object. If it wasn't null when we fetched it from *list, it won't
// be collected, and we can trust that it's still valid here.
if (true /*current->ob_refcnt > 0*/)
{
Py_INCREF(current);
PyTuple_SET_ITEM(tuple, i * 2, (PyObject *) current);
PyTuple_SET_ITEM(tuple, i * 2 + 1, current->wr_callback);
}
else {
Py_DECREF(current->wr_callback);
}
current->wr_callback = NULL;
clear_weakref(current);
current = next;
}
for (i = 0; i < count; ++i) {
PyObject *callback = PyTuple_GET_ITEM(tuple, i * 2 + 1);
/* The tuple may have slots left to NULL */
if (callback != NULL) {
PyObject *item = PyTuple_GET_ITEM(tuple, i * 2);
handle_callback((PyWeakReference *)item, callback);
}
}
Py_DECREF(tuple);
}
if (restore_error)
PyErr_Restore(err_type, err_value, err_tb);
}
}
@@ -268,4 +268,8 @@ extern "C" void Py_ReprLeave(PyObject* obj) noexcept {
}
}
}
+extern "C" int PyObject_Compare(PyObject* o1, PyObject* o2) noexcept {
+Py_FatalError("unimplemented");
+}
}
@@ -457,7 +457,7 @@ public:
static_assert(offsetof(Box, cls) == offsetof(struct _object, ob_type), "");
// Our default for tp_alloc:
-PyObject* PystonType_GenericAlloc(BoxedClass* cls, Py_ssize_t nitems) noexcept;
+extern "C" PyObject* PystonType_GenericAlloc(BoxedClass* cls, Py_ssize_t nitems) noexcept;
#define DEFAULT_CLASS(default_cls) \
void* operator new(size_t size, BoxedClass * cls) __attribute__((visibility("default"))) { \
...
@@ -25,6 +25,7 @@
#include "core/types.h"
#include "core/util.h"
#include "gc/heap.h"
+#include "runtime/objmodel.h"
#include "runtime/types.h"
#ifndef NVALGRIND
@@ -288,8 +289,10 @@ static void markPhase() {
#endif
}
-static void sweepPhase() {
-global_heap.freeUnmarked();
+static void sweepPhase(std::list<Box*, StlCompatAllocator<Box*>>& weakly_referenced) {
+// we need to use the allocator here because these objects are referenced only here, and calling the weakref
+// callbacks could start another gc
+global_heap.freeUnmarked(weakly_referenced);
}
static bool gc_enabled = true;
@@ -316,7 +319,24 @@ void runCollection() {
Timer _t("collecting", /*min_usec=*/10000);
markPhase();
-sweepPhase();
+std::list<Box*, StlCompatAllocator<Box*>> weakly_referenced;
+sweepPhase(weakly_referenced);
+for (auto o : weakly_referenced) {
+PyWeakReference** list = (PyWeakReference**)PyObject_GET_WEAKREFS_LISTPTR(o);
+while (PyWeakReference* head = *list) {
+if (head->wr_object != Py_None) {
+_PyWeakref_ClearRef(head);
+if (head->wr_callback) {
+runtimeCall(head->wr_callback, ArgPassSpec(1), reinterpret_cast<Box*>(head), NULL, NULL, NULL,
+NULL);
+head->wr_callback = NULL;
+}
+}
+}
+}
if (VERBOSITY("gc") >= 2)
printf("Collection #%d done\n\n", ncollections);
...
@@ -25,9 +25,9 @@
#endif
namespace pyston {
namespace gc {
-extern "C" inline void* gc_alloc(size_t bytes, GCKind kind_id) __attribute__((visibility("default")));
extern "C" inline void* gc_alloc(size_t bytes, GCKind kind_id) {
size_t alloc_bytes = bytes + sizeof(GCAllocation);
@@ -95,7 +95,6 @@ extern "C" inline void* gc_alloc(size_t bytes, GCKind kind_id) {
return r;
}
-extern "C" inline void* gc_realloc(void* ptr, size_t bytes) __attribute__((visibility("default")));
extern "C" inline void* gc_realloc(void* ptr, size_t bytes) {
// Normal realloc() supports receiving a NULL pointer, but we need to know what the GCKind is:
assert(ptr);
@@ -120,7 +119,6 @@ extern "C" inline void* gc_realloc(void* ptr, size_t bytes) {
#endif
}
-extern "C" inline void gc_free(void* ptr) __attribute__((visibility("default")));
extern "C" inline void gc_free(void* ptr) {
assert(ptr);
#ifndef NVALGRIND
...
@@ -33,7 +33,7 @@
namespace pyston {
namespace gc {
-void _doFree(GCAllocation* al);
+bool _doFree(GCAllocation* al, std::list<Box*, StlCompatAllocator<Box*>>* weakly_referenced);
// lots of linked lists around here, so let's just use template functions for operations on them.
template <class ListT> inline void nullNextPrev(ListT* node) {
@@ -75,7 +75,8 @@ template <class ListT, typename Func> inline void forEach(ListT* list, Func func
}
}
-template <class ListT, typename Free> inline void sweepList(ListT* head, Free free_func) {
+template <class ListT, typename Free>
+inline void sweepList(ListT* head, std::list<Box*, StlCompatAllocator<Box*>>& weakly_referenced, Free free_func) {
auto cur = head;
while (cur) {
GCAllocation* al = cur->data;
@@ -83,7 +84,7 @@ template <class ListT, typename Free> inline void sweepList(ListT* head, Free fr
clearMark(al);
cur = cur->next;
} else {
-_doFree(al);
+if (_doFree(al, &weakly_referenced)) {
removeFromLL(cur);
@@ -92,6 +93,7 @@ template <class ListT, typename Free> inline void sweepList(ListT* head, Free fr
free_func(to_free);
}
}
+}
}
static unsigned bytesAllocatedSinceCollection;
@@ -124,7 +126,7 @@ void registerGCManagedBytes(size_t bytes) {
Heap global_heap;
-void _doFree(GCAllocation* al) {
+bool _doFree(GCAllocation* al, std::list<Box*, StlCompatAllocator<Box*>>* weakly_referenced) {
if (VERBOSITY() >= 2)
printf("Freeing %p\n", al->user_data);
@@ -145,14 +147,24 @@ void _doFree(GCAllocation* al) {
VALGRIND_ENABLE_ERROR_REPORTING;
#endif
+if (PyType_SUPPORTS_WEAKREFS(b->cls)) {
+PyWeakReference** list = (PyWeakReference**)PyObject_GET_WEAKREFS_LISTPTR(b);
+if (list && *list) {
+assert(weakly_referenced && "attempting to free a weakly referenced object manually");
+weakly_referenced->push_back(b);
+return false;
+}
+}
ASSERT(b->cls->tp_dealloc == NULL, "%s", getTypeName(b));
if (b->cls->simple_destructor)
b->cls->simple_destructor(b);
}
+return true;
}
void Heap::destructContents(GCAllocation* al) {
-_doFree(al);
+_doFree(al, NULL);
}
struct HeapStatistics {
@@ -281,8 +293,8 @@ GCAllocation* SmallArena::allocationFrom(void* ptr) {
return reinterpret_cast<GCAllocation*>(&b->atoms[atom_idx]);
}
-void SmallArena::freeUnmarked() {
-thread_caches.forEachValue([this](ThreadBlockCache* cache) {
+void SmallArena::freeUnmarked(std::list<Box*, StlCompatAllocator<Box*>>& weakly_referenced) {
+thread_caches.forEachValue([this, &weakly_referenced](ThreadBlockCache* cache) {
for (int bidx = 0; bidx < NUM_BUCKETS; bidx++) {
Block* h = cache->cache_free_heads[bidx];
// Try to limit the amount of unused memory a thread can hold onto;
@@ -302,8 +314,8 @@ void SmallArena::freeUnmarked() {
insertIntoLL(&heads[bidx], h);
}
-Block** chain_end = _freeChain(&cache->cache_free_heads[bidx]);
-_freeChain(&cache->cache_full_heads[bidx]);
+Block** chain_end = _freeChain(&cache->cache_free_heads[bidx], weakly_referenced);
+_freeChain(&cache->cache_full_heads[bidx], weakly_referenced);
while (Block* b = cache->cache_full_heads[bidx]) {
removeFromLLAndNull(b);
@@ -313,8 +325,8 @@ void SmallArena::freeUnmarked() {
});
for (int bidx = 0; bidx < NUM_BUCKETS; bidx++) {
-Block** chain_end = _freeChain(&heads[bidx]);
-_freeChain(&full_heads[bidx]);
+Block** chain_end = _freeChain(&heads[bidx], weakly_referenced);
+_freeChain(&full_heads[bidx], weakly_referenced);
while (Block* b = full_heads[bidx]) {
removeFromLLAndNull(b);
@@ -341,7 +353,7 @@ void SmallArena::getStatistics(HeapStatistics* stats) {
}
-SmallArena::Block** SmallArena::_freeChain(Block** head) {
+SmallArena::Block** SmallArena::_freeChain(Block** head, std::list<Box*, StlCompatAllocator<Box*>>& weakly_referenced) {
while (Block* b = *head) {
int num_objects = b->numObjects();
int first_obj = b->minObjIndex();
@@ -359,9 +371,7 @@ SmallArena::Block** SmallArena::_freeChain(Block** head) {
if (isMarked(al)) {
clearMark(al);
} else {
-_doFree(al);
-// assert(p != (void*)0x127000d960); // the main module
+if (_doFree(al, &weakly_referenced))
b->isfree.set(atom_idx);
}
}
@@ -568,8 +578,8 @@ GCAllocation* LargeArena::allocationFrom(void* ptr) {
return NULL;
}
-void LargeArena::freeUnmarked() {
-sweepList(head, [this](LargeObj* ptr) { _freeLargeObj(ptr); });
+void LargeArena::freeUnmarked(std::list<Box*, StlCompatAllocator<Box*>>& weakly_referenced) {
+sweepList(head, weakly_referenced, [this](LargeObj* ptr) { _freeLargeObj(ptr); });
}
void LargeArena::getStatistics(HeapStatistics* stats) {
@@ -760,8 +770,8 @@ GCAllocation* HugeArena::allocationFrom(void* ptr) {
return NULL;
}
-void HugeArena::freeUnmarked() {
-sweepList(head, [this](HugeObj* ptr) { _freeHugeObj(ptr); });
+void HugeArena::freeUnmarked(std::list<Box*, StlCompatAllocator<Box*>>& weakly_referenced) {
+sweepList(head, weakly_referenced, [this](HugeObj* ptr) { _freeHugeObj(ptr); });
}
void HugeArena::getStatistics(HeapStatistics* stats) {
...
@@ -17,12 +17,61 @@
#include <cstddef>
#include <cstdint>
+#include <list>
#include <sys/mman.h>
#include "core/common.h"
#include "core/threading.h"
namespace pyston {
namespace gc {
extern "C" inline void* gc_alloc(size_t bytes, GCKind kind_id) __attribute__((visibility("default")));
extern "C" inline void* gc_realloc(void* ptr, size_t bytes) __attribute__((visibility("default")));
extern "C" inline void gc_free(void* ptr) __attribute__((visibility("default")));
}
template <class T> class StlCompatAllocator {
public:
typedef size_t size_type;
typedef T value_type;
typedef T* pointer;
typedef const T* const_pointer;
typedef T& reference;
typedef const T& const_reference;
typedef std::ptrdiff_t difference_type;
StlCompatAllocator() {}
template <class U> StlCompatAllocator(const StlCompatAllocator<U>& other) {}
template <class U> struct rebind { typedef StlCompatAllocator<U> other; };
pointer allocate(size_t n) {
size_t to_allocate = n * sizeof(value_type);
// assert(to_allocate < (1<<16));
return reinterpret_cast<pointer>(gc_alloc(to_allocate, gc::GCKind::CONSERVATIVE));
}
void deallocate(pointer p, size_t n) { gc::gc_free(p); }
// I would never be able to come up with this on my own:
// http://en.cppreference.com/w/cpp/memory/allocator/construct
template <class U, class... Args> void construct(U* p, Args&&... args) {
::new ((void*)p) U(std::forward<Args>(args)...);
}
template <class U> void destroy(U* p) { p->~U(); }
bool operator==(const StlCompatAllocator<T>& rhs) const { return true; }
bool operator!=(const StlCompatAllocator<T>& rhs) const { return false; }
};
template <typename K, typename V, typename Hash = std::hash<K>, typename KeyEqual = std::equal_to<K>>
class conservative_unordered_map
: public std::unordered_map<K, V, Hash, KeyEqual, StlCompatAllocator<std::pair<const K, V>>> {};
namespace gc {
// Notify the gc of n bytes as being under GC management.
@@ -146,7 +195,7 @@ public:
void free(GCAllocation* al);
GCAllocation* allocationFrom(void* ptr);
-void freeUnmarked();
+void freeUnmarked(std::list<Box*, StlCompatAllocator<Box*>>& weakly_referenced);
void getStatistics(HeapStatistics* stats);
@@ -277,7 +326,7 @@ private:
Block* _allocBlock(uint64_t size, Block** prev);
GCAllocation* _allocFromBlock(Block* b);
Block* _claimBlock(size_t rounded_size, Block** free_head);
-Block** _freeChain(Block** head);
+Block** _freeChain(Block** head, std::list<Box*, StlCompatAllocator<Box*>>& weakly_referenced);
void _getChainStatistics(HeapStatistics* stats, Block** head);
GCAllocation* __attribute__((__malloc__)) _alloc(size_t bytes, int bucket_idx);
@@ -350,7 +399,7 @@ public:
void free(GCAllocation* alloc);
GCAllocation* allocationFrom(void* ptr);
-void freeUnmarked();
+void freeUnmarked(std::list<Box*, StlCompatAllocator<Box*>>& weakly_referenced);
void getStatistics(HeapStatistics* stats);
};
@@ -368,7 +417,7 @@ public:
void free(GCAllocation* alloc);
GCAllocation* allocationFrom(void* ptr);
-void freeUnmarked();
+void freeUnmarked(std::list<Box*, StlCompatAllocator<Box*>>& weakly_referenced);
void getStatistics(HeapStatistics* stats);
@@ -475,10 +524,10 @@ public:
return NULL;
}
// not thread safe:
-void freeUnmarked() {
-small_arena.freeUnmarked();
-large_arena.freeUnmarked();
-huge_arena.freeUnmarked();
+void freeUnmarked(std::list<Box*, StlCompatAllocator<Box*>>& weakly_referenced) {
+small_arena.freeUnmarked(weakly_referenced);
+large_arena.freeUnmarked(weakly_referenced);
+huge_arena.freeUnmarked(weakly_referenced);
}
void dumpHeapStatistics();
...
@@ -426,10 +426,6 @@ extern "C" PyObject* PyObject_Call(PyObject* callable_object, PyObject* args, Py
}
}
-extern "C" void PyObject_ClearWeakRefs(PyObject* object) noexcept {
-Py_FatalError("unimplemented");
-}
extern "C" int PyObject_GetBuffer(PyObject* obj, Py_buffer* view, int flags) noexcept {
if (!PyObject_CheckBuffer(obj)) {
printf("%s\n", obj->cls->tp_name);
...
@@ -301,6 +301,7 @@ BoxedClass::BoxedClass(BoxedClass* base, gcvisit_func gc_visit, int attrs_offset
tp_flags |= Py_TPFLAGS_BASETYPE;
tp_flags |= Py_TPFLAGS_HAVE_CLASS;
tp_flags |= Py_TPFLAGS_HAVE_GC;
+tp_flags |= Py_TPFLAGS_HAVE_WEAKREFS;
tp_base = base;
...
...@@ -66,6 +66,7 @@ extern "C" void init_codecs(); ...@@ -66,6 +66,7 @@ extern "C" void init_codecs();
extern "C" void init_socket(); extern "C" void init_socket();
extern "C" void _PyUnicode_Init(); extern "C" void _PyUnicode_Init();
extern "C" void initunicodedata(); extern "C" void initunicodedata();
extern "C" void init_weakref();
namespace pyston { namespace pyston {
...@@ -78,7 +79,7 @@ bool IN_SHUTDOWN = false; ...@@ -78,7 +79,7 @@ bool IN_SHUTDOWN = false;
#define SLICE_STEP_OFFSET ((char*)&(((BoxedSlice*)0x01)->step) - (char*)0x1) #define SLICE_STEP_OFFSET ((char*)&(((BoxedSlice*)0x01)->step) - (char*)0x1)
// Analogue of PyType_GenericAlloc (default tp_alloc), but should only be used for Pyston classes! // Analogue of PyType_GenericAlloc (default tp_alloc), but should only be used for Pyston classes!
PyObject* PystonType_GenericAlloc(BoxedClass* cls, Py_ssize_t nitems) noexcept { extern "C" PyObject* PystonType_GenericAlloc(BoxedClass* cls, Py_ssize_t nitems) noexcept {
assert(cls); assert(cls);
RELEASE_ASSERT(nitems == 0, ""); RELEASE_ASSERT(nitems == 0, "");
RELEASE_ASSERT(cls->tp_itemsize == 0, ""); RELEASE_ASSERT(cls->tp_itemsize == 0, "");
...@@ -244,7 +245,8 @@ Box* Box::nextIC() { ...@@ -244,7 +245,8 @@ Box* Box::nextIC() {
std::string builtinStr("__builtin__"); std::string builtinStr("__builtin__");
extern "C" BoxedFunctionBase::BoxedFunctionBase(CLFunction* f) extern "C" BoxedFunctionBase::BoxedFunctionBase(CLFunction* f)
: f(f), closure(NULL), isGenerator(false), ndefaults(0), defaults(NULL), modname(NULL), name(NULL) { : in_weakreflist(NULL), f(f), closure(NULL), isGenerator(false), ndefaults(0), defaults(NULL), modname(NULL),
name(NULL) {
if (f->source) { if (f->source) {
this->modname = f->source->parent_module->getattr("__name__", NULL); this->modname = f->source->parent_module->getattr("__name__", NULL);
} else { } else {
...@@ -258,7 +260,8 @@ extern "C" BoxedFunctionBase::BoxedFunctionBase(CLFunction* f) ...@@ -258,7 +260,8 @@ extern "C" BoxedFunctionBase::BoxedFunctionBase(CLFunction* f)
extern "C" BoxedFunctionBase::BoxedFunctionBase(CLFunction* f, std::initializer_list<Box*> defaults, extern "C" BoxedFunctionBase::BoxedFunctionBase(CLFunction* f, std::initializer_list<Box*> defaults,
BoxedClosure* closure, bool isGenerator) BoxedClosure* closure, bool isGenerator)
: f(f), closure(closure), isGenerator(isGenerator), ndefaults(0), defaults(NULL), modname(NULL), name(NULL) { : in_weakreflist(NULL), f(f), closure(closure), isGenerator(isGenerator), ndefaults(0), defaults(NULL),
modname(NULL), name(NULL) {
if (defaults.size()) { if (defaults.size()) {
// make sure to initialize defaults first, since the GC behavior is triggered by ndefaults, // make sure to initialize defaults first, since the GC behavior is triggered by ndefaults,
// and a GC can happen within this constructor: // and a GC can happen within this constructor:
...@@ -485,6 +488,23 @@ extern "C" void sliceGCHandler(GCVisitor* v, Box* b) { ...@@ -485,6 +488,23 @@ extern "C" void sliceGCHandler(GCVisitor* v, Box* b) {
v->visit(sl->step); v->visit(sl->step);
} }
static int call_gc_visit(PyObject* val, void* arg) {
if (val) {
GCVisitor* v = static_cast<GCVisitor*>(arg);
v->visit(val);
}
return 0;
}
static void proxy_to_tp_traverse(GCVisitor* v, Box* b) {
boxGCHandler(v, b);
b->cls->tp_traverse(b, call_gc_visit, v);
}
static void proxy_to_tp_clear(Box* b) {
b->cls->tp_clear(b);
}
// This probably belongs in tuple.cpp?
extern "C" void tupleGCHandler(GCVisitor* v, Box* b) {
boxGCHandler(v, b);
@@ -1257,13 +1277,18 @@ void setupRuntime() {
float_cls = new BoxedHeapClass(object_cls, NULL, 0, sizeof(BoxedFloat), false, "float");
function_cls = new BoxedHeapClass(object_cls, &functionGCHandler, offsetof(BoxedFunction, attrs),
sizeof(BoxedFunction), false, "function");
+function_cls->tp_weaklistoffset = offsetof(BoxedFunction, in_weakreflist);
builtin_function_or_method_cls
= new BoxedHeapClass(object_cls, &functionGCHandler, offsetof(BoxedBuiltinFunctionOrMethod, attrs),
sizeof(BoxedBuiltinFunctionOrMethod), false, "builtin_function_or_method");
+builtin_function_or_method_cls->tp_weaklistoffset = offsetof(BoxedBuiltinFunctionOrMethod, in_weakreflist);
function_cls->simple_destructor = builtin_function_or_method_cls->simple_destructor = functionDtor;
instancemethod_cls = new BoxedHeapClass(object_cls, &instancemethodGCHandler, 0, sizeof(BoxedInstanceMethod), false,
"instancemethod");
+instancemethod_cls->tp_weaklistoffset = offsetof(BoxedInstanceMethod, in_weakreflist);
list_cls = new BoxedHeapClass(object_cls, &listGCHandler, 0, sizeof(BoxedList), false, "list");
slice_cls = new BoxedHeapClass(object_cls, &sliceGCHandler, 0, sizeof(BoxedSlice), false, "slice");
dict_cls = new BoxedHeapClass(object_cls, &dictGCHandler, 0, sizeof(BoxedDict), false, "dict");
@@ -1447,6 +1472,26 @@ void setupRuntime() {
init_codecs();
init_socket();
initunicodedata();
+init_weakref();
// some additional setup to ensure weakrefs participate in our GC
BoxedClass* weakref_ref_cls = &_PyWeakref_RefType;
weakref_ref_cls->tp_alloc = PystonType_GenericAlloc;
weakref_ref_cls->gc_visit = proxy_to_tp_traverse;
weakref_ref_cls->simple_destructor = proxy_to_tp_clear;
weakref_ref_cls->is_pyston_class = true;
BoxedClass* weakref_proxy_cls = &_PyWeakref_ProxyType;
weakref_proxy_cls->tp_alloc = PystonType_GenericAlloc;
weakref_proxy_cls->gc_visit = proxy_to_tp_traverse;
weakref_proxy_cls->simple_destructor = proxy_to_tp_clear;
weakref_proxy_cls->is_pyston_class = true;
BoxedClass* weakref_callableproxy = &_PyWeakref_CallableProxyType;
weakref_callableproxy->tp_alloc = PystonType_GenericAlloc;
weakref_callableproxy->gc_visit = proxy_to_tp_traverse;
weakref_callableproxy->simple_destructor = proxy_to_tp_clear;
weakref_callableproxy->is_pyston_class = true;
setupSysEnd();
...
@@ -134,46 +134,6 @@ Box* objectStr(Box*);
Box* objectRepr(Box*);
-template <class T> class StlCompatAllocator {
-public:
-typedef size_t size_type;
-typedef T value_type;
-typedef T* pointer;
-typedef const T* const_pointer;
-typedef T& reference;
-typedef const T& const_reference;
-typedef std::ptrdiff_t difference_type;
-StlCompatAllocator() {}
-template <class U> StlCompatAllocator(const StlCompatAllocator<U>& other) {}
-template <class U> struct rebind { typedef StlCompatAllocator<U> other; };
-pointer allocate(size_t n) {
-size_t to_allocate = n * sizeof(value_type);
-// assert(to_allocate < (1<<16));
-return reinterpret_cast<pointer>(gc_alloc(to_allocate, gc::GCKind::CONSERVATIVE));
-}
-void deallocate(pointer p, size_t n) { gc::gc_free(p); }
-// I would never be able to come up with this on my own:
-// http://en.cppreference.com/w/cpp/memory/allocator/construct
-template <class U, class... Args> void construct(U* p, Args&&... args) {
-::new ((void*)p) U(std::forward<Args>(args)...);
-}
-template <class U> void destroy(U* p) { p->~U(); }
-bool operator==(const StlCompatAllocator<T>& rhs) const { return true; }
-bool operator!=(const StlCompatAllocator<T>& rhs) const { return false; }
-};
-template <typename K, typename V, typename Hash = std::hash<K>, typename KeyEqual = std::equal_to<K>>
-class conservative_unordered_map
-: public std::unordered_map<K, V, Hash, KeyEqual, StlCompatAllocator<std::pair<const K, V>>> {};
class BoxedClass : public BoxVar {
public:
typedef void (*gcvisit_func)(GCVisitor*, Box*);
@@ -363,10 +323,13 @@ class BoxedUnicode : public Box {
class BoxedInstanceMethod : public Box {
public:
+Box** in_weakreflist;
// obj is NULL for unbound instancemethod
Box* obj, *func;
-BoxedInstanceMethod(Box* obj, Box* func) __attribute__((visibility("default"))) : obj(obj), func(func) {}
+BoxedInstanceMethod(Box* obj, Box* func) __attribute__((visibility("default")))
+: in_weakreflist(NULL), obj(obj), func(func) {}
DEFAULT_CLASS_SIMPLE(instancemethod_cls);
};
@@ -448,6 +411,8 @@ static_assert(sizeof(BoxedDict) == sizeof(PyDictObject), "");
class BoxedFunctionBase : public Box {
public:
+Box** in_weakreflist;
HCAttrs attrs;
CLFunction* f;
BoxedClosure* closure;
@@ -639,6 +604,6 @@ Box* makeAttrWrapper(Box* b);
#define OverflowError ((BoxedClass*)PyExc_OverflowError)
// Our default for tp_alloc:
-PyObject* PystonType_GenericAlloc(BoxedClass* cls, Py_ssize_t nitems) noexcept;
+extern "C" PyObject* PystonType_GenericAlloc(BoxedClass* cls, Py_ssize_t nitems) noexcept;
}
#endif
import weakref
import gc
def cb(wr):
    print "object was destroyed", wr()
def doStuff():
    def meth():
        pass
    wr = weakref.ref(meth, cb)
    return wr
w = doStuff()
gc.collect()
import weakref
def test_wr(o):
    try:
        r = weakref.ref(o)
        print "passed", type(o)
        return r
    except:
        print "failed", type(o)
def test():
    1
wr = test_wr(test)
print wr() == test
#print weakref.getweakrefs(test)[0] == wr
print weakref.getweakrefcount(test)