Commit 52d1d182 authored by Chris Toshok

get weakrefs in and working for functions/instancemethods

bring in the CPython implementation of weakrefs, and after init_weakref()
overwrite some fields in the BoxedClasses for weakref.ref/proxy/callableproxy
so that they participate in our GC (we also make use of their tp_traverse
functions for scanning and their tp_clear functions as their simple_destructor).

as we sweep the heap:

1) any unreachable objects that have weak references are kept alive
   and placed in a std::list called weakly_referenced.

2) any unreachable weakref.ref objects are cleared and removed from
   their referent's list.

After sweeping the entire heap, we then loop over the objects in
weakly_referenced.  If an object in the list still has weak references,
we loop over them, clearing their targets (setting them to None) and
calling their callbacks if they have one.  test/tests/weakref1.py tests this.
parent 21a98b05
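A condensed sketch of that flow, pieced together from the gc/collector.cpp and gc/heap.cpp hunks below (it reuses this commit's names and types and is not standalone code):

// Condensed from the collector/heap changes in this commit; sketch only.
std::list<Box*, StlCompatAllocator<Box*>> weakly_referenced;
sweepPhase(weakly_referenced); // during the sweep, _doFree() defers any unreachable
                               // object that still has weakrefs into weakly_referenced
                               // instead of freeing it

// after the sweep: clear every remaining weakref and invoke its callback
for (auto o : weakly_referenced) {
    PyWeakReference** list = (PyWeakReference**)PyObject_GET_WEAKREFS_LISTPTR(o);
    while (PyWeakReference* head = *list) {
        if (head->wr_object != Py_None) {
            _PyWeakref_ClearRef(head); // unlinks head and sets its wr_object to Py_None
            if (head->wr_callback) {
                runtimeCall(head->wr_callback, ArgPassSpec(1), reinterpret_cast<Box*>(head),
                            NULL, NULL, NULL, NULL);
                head->wr_callback = NULL;
            }
        }
    }
}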
@@ -54,3 +54,5 @@ compile.log
*.swo
*.out
*~
@@ -291,8 +291,8 @@ STDLIB_OBJS := stdlib.bc.o stdlib.stripped.bc.o
STDLIB_RELEASE_OBJS := stdlib.release.bc.o
ASM_SRCS := $(wildcard src/runtime/*.S)
-STDMODULE_SRCS := errnomodule.c shamodule.c sha256module.c sha512module.c _math.c mathmodule.c md5.c md5module.c _randommodule.c _sre.c operator.c binascii.c pwdmodule.c posixmodule.c _struct.c datetimemodule.c _functoolsmodule.c _collectionsmodule.c itertoolsmodule.c resource.c signalmodule.c selectmodule.c fcntlmodule.c timemodule.c arraymodule.c zlibmodule.c _codecsmodule.c socketmodule.c unicodedata.c $(EXTRA_STDMODULE_SRCS)
+STDMODULE_SRCS := errnomodule.c shamodule.c sha256module.c sha512module.c _math.c mathmodule.c md5.c md5module.c _randommodule.c _sre.c operator.c binascii.c pwdmodule.c posixmodule.c _struct.c datetimemodule.c _functoolsmodule.c _collectionsmodule.c itertoolsmodule.c resource.c signalmodule.c selectmodule.c fcntlmodule.c timemodule.c arraymodule.c zlibmodule.c _codecsmodule.c socketmodule.c unicodedata.c _weakref.c $(EXTRA_STDMODULE_SRCS)
-STDOBJECT_SRCS := structseq.c capsule.c stringobject.c exceptions.c unicodeobject.c unicodectype.c bytearrayobject.c bytes_methods.c $(EXTRA_STDOBJECT_SRCS)
+STDOBJECT_SRCS := structseq.c capsule.c stringobject.c exceptions.c unicodeobject.c unicodectype.c bytearrayobject.c bytes_methods.c weakrefobject.c $(EXTRA_STDOBJECT_SRCS)
STDPYTHON_SRCS := pyctype.c getargs.c formatter_string.c pystrtod.c dtoa.c formatter_unicode.c $(EXTRA_STDPYTHON_SRCS)
FROM_CPYTHON_SRCS := $(addprefix from_cpython/Modules/,$(STDMODULE_SRCS)) $(addprefix from_cpython/Objects/,$(STDOBJECT_SRCS)) $(addprefix from_cpython/Python/,$(STDPYTHON_SRCS))
......
@@ -15,10 +15,10 @@ endforeach(STDLIB_FILE)
add_custom_target(copy_stdlib ALL DEPENDS ${STDLIB_TARGETS})
# compile specified files in from_cpython/Modules
-file(GLOB_RECURSE STDMODULE_SRCS Modules errnomodule.c shamodule.c sha256module.c sha512module.c _math.c mathmodule.c md5.c md5module.c _randommodule.c _sre.c operator.c binascii.c pwdmodule.c posixmodule.c _struct.c datetimemodule.c _functoolsmodule.c _collectionsmodule.c itertoolsmodule.c resource.c signalmodule.c selectmodule.c fcntlmodule.c timemodule.c arraymodule.c zlibmodule.c _codecsmodule.c socketmodule.c unicodedata.c)
+file(GLOB_RECURSE STDMODULE_SRCS Modules errnomodule.c shamodule.c sha256module.c sha512module.c _math.c mathmodule.c md5.c md5module.c _randommodule.c _sre.c operator.c binascii.c pwdmodule.c posixmodule.c _struct.c datetimemodule.c _functoolsmodule.c _collectionsmodule.c itertoolsmodule.c resource.c signalmodule.c selectmodule.c fcntlmodule.c timemodule.c arraymodule.c zlibmodule.c _codecsmodule.c socketmodule.c unicodedata.c _weakref.c)
# compile specified files in from_cpython/Objects
-file(GLOB_RECURSE STDOBJECT_SRCS Objects structseq.c capsule.c stringobject.c exceptions.c unicodeobject.c unicodectype.c bytearrayobject.c bytes_methods.c)
+file(GLOB_RECURSE STDOBJECT_SRCS Objects structseq.c capsule.c stringobject.c exceptions.c unicodeobject.c unicodectype.c bytearrayobject.c bytes_methods.c weakrefobject.c)
# compile specified files in from_cpython/Python
file(GLOB_RECURSE STDPYTHON_SRCS Python getargs.c pyctype.c formatter_string.c pystrtod.c dtoa.c formatter_unicode.c)
......
@@ -73,6 +73,7 @@
#include "iterobject.h"
#include "descrobject.h"
#include "warnings.h"
#include "weakrefobject.h"
#include "codecs.h"
#include "pyerrors.h"
......
/* Weak references objects for Python. */
#ifndef Py_WEAKREFOBJECT_H
#define Py_WEAKREFOBJECT_H
#ifdef __cplusplus
extern "C" {
#endif
typedef struct _PyWeakReference PyWeakReference;
/* PyWeakReference is the base struct for the Python ReferenceType, ProxyType,
* and CallableProxyType.
*/
struct _PyWeakReference {
PyObject_HEAD
/* The object to which this is a weak reference, or Py_None if none.
* Note that this is a stealth reference: wr_object's refcount is
* not incremented to reflect this pointer.
*/
PyObject *wr_object;
/* A callable to invoke when wr_object dies, or NULL if none. */
PyObject *wr_callback;
/* A cache for wr_object's hash code. As usual for hashes, this is -1
* if the hash code isn't known yet.
*/
long hash;
/* If wr_object is weakly referenced, wr_object has a doubly-linked NULL-
* terminated list of weak references to it. These are the list pointers.
* If wr_object goes away, wr_object is set to Py_None, and these pointers
* have no meaning then.
*/
PyWeakReference *wr_prev;
PyWeakReference *wr_next;
};
PyAPI_DATA(PyTypeObject) _PyWeakref_RefType;
PyAPI_DATA(PyTypeObject) _PyWeakref_ProxyType;
PyAPI_DATA(PyTypeObject) _PyWeakref_CallableProxyType;
#define PyWeakref_CheckRef(op) PyObject_TypeCheck(op, &_PyWeakref_RefType)
#define PyWeakref_CheckRefExact(op) \
(Py_TYPE(op) == &_PyWeakref_RefType)
#define PyWeakref_CheckProxy(op) \
((Py_TYPE(op) == &_PyWeakref_ProxyType) || \
(Py_TYPE(op) == &_PyWeakref_CallableProxyType))
#define PyWeakref_Check(op) \
(PyWeakref_CheckRef(op) || PyWeakref_CheckProxy(op))
PyAPI_FUNC(PyObject *) PyWeakref_NewRef(PyObject *ob,
PyObject *callback) PYSTON_NOEXCEPT;
PyAPI_FUNC(PyObject *) PyWeakref_NewProxy(PyObject *ob,
PyObject *callback) PYSTON_NOEXCEPT;
PyAPI_FUNC(PyObject *) PyWeakref_GetObject(PyObject *ref) PYSTON_NOEXCEPT;
PyAPI_FUNC(Py_ssize_t) _PyWeakref_GetWeakrefCount(PyWeakReference *head) PYSTON_NOEXCEPT;
PyAPI_FUNC(void) _PyWeakref_ClearRef(PyWeakReference *self) PYSTON_NOEXCEPT;
/* Explanation for the Py_REFCNT() check: when a weakref's target is part
of a long chain of deallocations which triggers the trashcan mechanism,
clearing the weakrefs can be delayed long after the target's refcount
has dropped to zero. In the meantime, code accessing the weakref will
be able to "see" the target object even though it is supposed to be
unreachable. See issue #16602. */
/*
#define PyWeakref_GET_OBJECT(ref) \
(Py_REFCNT(((PyWeakReference *)(ref))->wr_object) > 0 \
? ((PyWeakReference *)(ref))->wr_object \
: Py_None)
*/
/* pyston version below. we'll need to worry about ensuring we keep
the same semantics of weakrefs being cleared before destructors
fire */
#define PyWeakref_GET_OBJECT(ref) ((PyWeakReference *)(ref))->wr_object
#ifdef __cplusplus
}
#endif
#endif /* !Py_WEAKREFOBJECT_H */
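For orientation, a minimal, hypothetical example of the C-level API declared above, written against the standard CPython embedding API that Pyston aims to stay compatible with; the choice of a set as the referent is an assumption (sets are weakly referenceable in CPython), and under Pyston's GC the weakref would be cleared at collection time rather than at the final decref:

/* Hypothetical usage sketch, standard CPython semantics; not taken from this commit. */
#include <stdio.h>
#include <Python.h>

int main(void) {
    Py_Initialize();

    PyObject *obj = PySet_New(NULL);             /* sets support weak references */
    PyObject *ref = PyWeakref_NewRef(obj, NULL); /* new weakref with no callback */

    if (ref && PyWeakref_CheckRef(ref))
        /* PyWeakref_GET_OBJECT gives a borrowed reference, or Py_None once the
         * target has been cleared. */
        printf("alive before: %d\n", PyWeakref_GET_OBJECT(ref) != Py_None);

    Py_DECREF(obj); /* drop the last strong reference; CPython clears the weakref here */
    if (ref)
        printf("alive after: %d\n", PyWeakref_GET_OBJECT(ref) != Py_None);

    Py_XDECREF(ref);
    Py_Finalize();
    return 0;
}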
@@ -96,6 +96,14 @@ init_weakref(void)
m = Py_InitModule3("_weakref", weakref_functions,
                   "Weak-reference support module.");
if (m != NULL) {
// Pyston change: call PyType_Ready on our types
if (PyType_Ready(&_PyWeakref_RefType) < 0)
return;
if (PyType_Ready(&_PyWeakref_ProxyType) < 0)
return;
if (PyType_Ready(&_PyWeakref_CallableProxyType) < 0)
return;
Py_INCREF(&_PyWeakref_RefType);
PyModule_AddObject(m, "ref",
                   (PyObject *) &_PyWeakref_RefType);
......
This diff is collapsed.
@@ -268,4 +268,8 @@ extern "C" void Py_ReprLeave(PyObject* obj) noexcept {
}
}
}
extern "C" int PyObject_Compare(PyObject* o1, PyObject* o2) noexcept {
    Py_FatalError("unimplemented");
}
}
@@ -457,7 +457,7 @@ public:
static_assert(offsetof(Box, cls) == offsetof(struct _object, ob_type), "");
// Our default for tp_alloc:
-PyObject* PystonType_GenericAlloc(BoxedClass* cls, Py_ssize_t nitems) noexcept;
+extern "C" PyObject* PystonType_GenericAlloc(BoxedClass* cls, Py_ssize_t nitems) noexcept;
#define DEFAULT_CLASS(default_cls) \
void* operator new(size_t size, BoxedClass * cls) __attribute__((visibility("default"))) { \
......
@@ -25,6 +25,7 @@
#include "core/types.h"
#include "core/util.h"
#include "gc/heap.h"
#include "runtime/objmodel.h"
#include "runtime/types.h"
#ifndef NVALGRIND
@@ -288,8 +289,10 @@ static void markPhase() {
#endif
}
-static void sweepPhase() {
-    global_heap.freeUnmarked();
+static void sweepPhase(std::list<Box*, StlCompatAllocator<Box*>>& weakly_referenced) {
+    // we need to use the allocator here because these objects are referenced only here, and calling the weakref
+    // callbacks could start another gc
+    global_heap.freeUnmarked(weakly_referenced);
}
static bool gc_enabled = true;
@@ -316,7 +319,24 @@ void runCollection() {
Timer _t("collecting", /*min_usec=*/10000);
markPhase();
-sweepPhase();
+std::list<Box*, StlCompatAllocator<Box*>> weakly_referenced;
+sweepPhase(weakly_referenced);
for (auto o : weakly_referenced) {
PyWeakReference** list = (PyWeakReference**)PyObject_GET_WEAKREFS_LISTPTR(o);
while (PyWeakReference* head = *list) {
if (head->wr_object != Py_None) {
_PyWeakref_ClearRef(head);
if (head->wr_callback) {
runtimeCall(head->wr_callback, ArgPassSpec(1), reinterpret_cast<Box*>(head), NULL, NULL, NULL,
NULL);
head->wr_callback = NULL;
}
}
}
}
if (VERBOSITY("gc") >= 2)
    printf("Collection #%d done\n\n", ncollections);
......
@@ -25,9 +25,9 @@
#endif
namespace pyston {
namespace gc {
extern "C" inline void* gc_alloc(size_t bytes, GCKind kind_id) __attribute__((visibility("default")));
extern "C" inline void* gc_alloc(size_t bytes, GCKind kind_id) {
    size_t alloc_bytes = bytes + sizeof(GCAllocation);
@@ -95,7 +95,6 @@ extern "C" inline void* gc_alloc(size_t bytes, GCKind kind_id) {
    return r;
}
-extern "C" inline void* gc_realloc(void* ptr, size_t bytes) __attribute__((visibility("default")));
extern "C" inline void* gc_realloc(void* ptr, size_t bytes) {
    // Normal realloc() supports receiving a NULL pointer, but we need to know what the GCKind is:
    assert(ptr);
@@ -120,7 +119,6 @@ extern "C" inline void* gc_realloc(void* ptr, size_t bytes) {
#endif
}
-extern "C" inline void gc_free(void* ptr) __attribute__((visibility("default")));
extern "C" inline void gc_free(void* ptr) {
    assert(ptr);
#ifndef NVALGRIND
......
@@ -33,7 +33,7 @@
namespace pyston {
namespace gc {
-void _doFree(GCAllocation* al);
+bool _doFree(GCAllocation* al, std::list<Box*, StlCompatAllocator<Box*>>* weakly_referenced);
// lots of linked lists around here, so let's just use template functions for operations on them.
template <class ListT> inline void nullNextPrev(ListT* node) {
@@ -75,7 +75,8 @@ template <class ListT, typename Func> inline void forEach(ListT* list, Func func
}
}
-template <class ListT, typename Free> inline void sweepList(ListT* head, Free free_func) {
+template <class ListT, typename Free>
+inline void sweepList(ListT* head, std::list<Box*, StlCompatAllocator<Box*>>& weakly_referenced, Free free_func) {
    auto cur = head;
    while (cur) {
        GCAllocation* al = cur->data;
@@ -83,13 +84,14 @@ template <class ListT, typename Free> inline void sweepList(ListT* head, Free fr
            clearMark(al);
            cur = cur->next;
        } else {
-            _doFree(al);
+            if (_doFree(al, &weakly_referenced)) {
                removeFromLL(cur);
                auto to_free = cur;
                cur = cur->next;
                free_func(to_free);
            }
        }
    }
}
@@ -124,7 +126,7 @@ void registerGCManagedBytes(size_t bytes) {
Heap global_heap;
-void _doFree(GCAllocation* al) {
+bool _doFree(GCAllocation* al, std::list<Box*, StlCompatAllocator<Box*>>* weakly_referenced) {
if (VERBOSITY() >= 2)
    printf("Freeing %p\n", al->user_data);
@@ -145,14 +147,24 @@ void _doFree(GCAllocation* al) {
VALGRIND_ENABLE_ERROR_REPORTING;
#endif
if (PyType_SUPPORTS_WEAKREFS(b->cls)) {
PyWeakReference** list = (PyWeakReference**)PyObject_GET_WEAKREFS_LISTPTR(b);
if (list && *list) {
assert(weakly_referenced && "attempting to free a weakly referenced object manually");
weakly_referenced->push_back(b);
return false;
}
}
ASSERT(b->cls->tp_dealloc == NULL, "%s", getTypeName(b));
if (b->cls->simple_destructor)
    b->cls->simple_destructor(b);
}
return true;
}
void Heap::destructContents(GCAllocation* al) {
-    _doFree(al);
+    _doFree(al, NULL);
}
struct HeapStatistics {
@@ -281,8 +293,8 @@ GCAllocation* SmallArena::allocationFrom(void* ptr) {
    return reinterpret_cast<GCAllocation*>(&b->atoms[atom_idx]);
}
-void SmallArena::freeUnmarked() {
-    thread_caches.forEachValue([this](ThreadBlockCache* cache) {
+void SmallArena::freeUnmarked(std::list<Box*, StlCompatAllocator<Box*>>& weakly_referenced) {
+    thread_caches.forEachValue([this, &weakly_referenced](ThreadBlockCache* cache) {
        for (int bidx = 0; bidx < NUM_BUCKETS; bidx++) {
            Block* h = cache->cache_free_heads[bidx];
            // Try to limit the amount of unused memory a thread can hold onto;
@@ -302,8 +314,8 @@ void SmallArena::freeUnmarked() {
            insertIntoLL(&heads[bidx], h);
        }
-        Block** chain_end = _freeChain(&cache->cache_free_heads[bidx]);
-        _freeChain(&cache->cache_full_heads[bidx]);
+        Block** chain_end = _freeChain(&cache->cache_free_heads[bidx], weakly_referenced);
+        _freeChain(&cache->cache_full_heads[bidx], weakly_referenced);
        while (Block* b = cache->cache_full_heads[bidx]) {
            removeFromLLAndNull(b);
@@ -313,8 +325,8 @@ void SmallArena::freeUnmarked() {
    });
    for (int bidx = 0; bidx < NUM_BUCKETS; bidx++) {
-        Block** chain_end = _freeChain(&heads[bidx]);
-        _freeChain(&full_heads[bidx]);
+        Block** chain_end = _freeChain(&heads[bidx], weakly_referenced);
+        _freeChain(&full_heads[bidx], weakly_referenced);
        while (Block* b = full_heads[bidx]) {
            removeFromLLAndNull(b);
@@ -341,7 +353,7 @@ void SmallArena::getStatistics(HeapStatistics* stats) {
}
-SmallArena::Block** SmallArena::_freeChain(Block** head) {
+SmallArena::Block** SmallArena::_freeChain(Block** head, std::list<Box*, StlCompatAllocator<Box*>>& weakly_referenced) {
    while (Block* b = *head) {
        int num_objects = b->numObjects();
        int first_obj = b->minObjIndex();
@@ -359,10 +371,8 @@ SmallArena::Block** SmallArena::_freeChain(Block** head) {
            if (isMarked(al)) {
                clearMark(al);
            } else {
-                _doFree(al);
-                // assert(p != (void*)0x127000d960); // the main module
-                b->isfree.set(atom_idx);
+                if (_doFree(al, &weakly_referenced))
+                    b->isfree.set(atom_idx);
            }
        }
@@ -568,8 +578,8 @@ GCAllocation* LargeArena::allocationFrom(void* ptr) {
    return NULL;
}
-void LargeArena::freeUnmarked() {
-    sweepList(head, [this](LargeObj* ptr) { _freeLargeObj(ptr); });
+void LargeArena::freeUnmarked(std::list<Box*, StlCompatAllocator<Box*>>& weakly_referenced) {
+    sweepList(head, weakly_referenced, [this](LargeObj* ptr) { _freeLargeObj(ptr); });
}
void LargeArena::getStatistics(HeapStatistics* stats) {
@@ -760,8 +770,8 @@ GCAllocation* HugeArena::allocationFrom(void* ptr) {
    return NULL;
}
-void HugeArena::freeUnmarked() {
-    sweepList(head, [this](HugeObj* ptr) { _freeHugeObj(ptr); });
+void HugeArena::freeUnmarked(std::list<Box*, StlCompatAllocator<Box*>>& weakly_referenced) {
+    sweepList(head, weakly_referenced, [this](HugeObj* ptr) { _freeHugeObj(ptr); });
}
void HugeArena::getStatistics(HeapStatistics* stats) {
......
@@ -17,12 +17,61 @@
#include <cstddef>
#include <cstdint>
#include <list>
#include <sys/mman.h>
#include "core/common.h"
#include "core/threading.h"
namespace pyston {
namespace gc {
extern "C" inline void* gc_alloc(size_t bytes, GCKind kind_id) __attribute__((visibility("default")));
extern "C" inline void* gc_realloc(void* ptr, size_t bytes) __attribute__((visibility("default")));
extern "C" inline void gc_free(void* ptr) __attribute__((visibility("default")));
}
template <class T> class StlCompatAllocator {
public:
typedef size_t size_type;
typedef T value_type;
typedef T* pointer;
typedef const T* const_pointer;
typedef T& reference;
typedef const T& const_reference;
typedef std::ptrdiff_t difference_type;
StlCompatAllocator() {}
template <class U> StlCompatAllocator(const StlCompatAllocator<U>& other) {}
template <class U> struct rebind { typedef StlCompatAllocator<U> other; };
pointer allocate(size_t n) {
size_t to_allocate = n * sizeof(value_type);
// assert(to_allocate < (1<<16));
return reinterpret_cast<pointer>(gc_alloc(to_allocate, gc::GCKind::CONSERVATIVE));
}
void deallocate(pointer p, size_t n) { gc::gc_free(p); }
// I would never be able to come up with this on my own:
// http://en.cppreference.com/w/cpp/memory/allocator/construct
template <class U, class... Args> void construct(U* p, Args&&... args) {
::new ((void*)p) U(std::forward<Args>(args)...);
}
template <class U> void destroy(U* p) { p->~U(); }
bool operator==(const StlCompatAllocator<T>& rhs) const { return true; }
bool operator!=(const StlCompatAllocator<T>& rhs) const { return false; }
};
template <typename K, typename V, typename Hash = std::hash<K>, typename KeyEqual = std::equal_to<K>>
class conservative_unordered_map
: public std::unordered_map<K, V, Hash, KeyEqual, StlCompatAllocator<std::pair<const K, V>>> {};
namespace gc {
// Notify the gc of n bytes as being under GC management.
@@ -146,7 +195,7 @@ public:
    void free(GCAllocation* al);
    GCAllocation* allocationFrom(void* ptr);
-    void freeUnmarked();
+    void freeUnmarked(std::list<Box*, StlCompatAllocator<Box*>>& weakly_referenced);
    void getStatistics(HeapStatistics* stats);
@@ -277,7 +326,7 @@ private:
    Block* _allocBlock(uint64_t size, Block** prev);
    GCAllocation* _allocFromBlock(Block* b);
    Block* _claimBlock(size_t rounded_size, Block** free_head);
-    Block** _freeChain(Block** head);
+    Block** _freeChain(Block** head, std::list<Box*, StlCompatAllocator<Box*>>& weakly_referenced);
    void _getChainStatistics(HeapStatistics* stats, Block** head);
    GCAllocation* __attribute__((__malloc__)) _alloc(size_t bytes, int bucket_idx);
@@ -350,7 +399,7 @@ public:
    void free(GCAllocation* alloc);
    GCAllocation* allocationFrom(void* ptr);
-    void freeUnmarked();
+    void freeUnmarked(std::list<Box*, StlCompatAllocator<Box*>>& weakly_referenced);
    void getStatistics(HeapStatistics* stats);
};
@@ -368,7 +417,7 @@ public:
    void free(GCAllocation* alloc);
    GCAllocation* allocationFrom(void* ptr);
-    void freeUnmarked();
+    void freeUnmarked(std::list<Box*, StlCompatAllocator<Box*>>& weakly_referenced);
    void getStatistics(HeapStatistics* stats);
@@ -475,10 +524,10 @@ public:
    return NULL;
}
// not thread safe:
-void freeUnmarked() {
-    small_arena.freeUnmarked();
-    large_arena.freeUnmarked();
-    huge_arena.freeUnmarked();
+void freeUnmarked(std::list<Box*, StlCompatAllocator<Box*>>& weakly_referenced) {
+    small_arena.freeUnmarked(weakly_referenced);
+    large_arena.freeUnmarked(weakly_referenced);
+    huge_arena.freeUnmarked(weakly_referenced);
}
void dumpHeapStatistics();
......
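StlCompatAllocator moves into heap.h above so that containers used during collection, such as weakly_referenced, get their node storage from the GC heap as conservative allocations. A minimal in-tree sketch of how it plugs into a standard container (the typedef and function names here are illustrative, not taken from the commit):

// Illustration only: a list whose node storage comes from gc_alloc() via
// StlCompatAllocator, mirroring the weakly_referenced list used by the sweep.
#include <list>

#include "gc/heap.h"       // StlCompatAllocator, gc_alloc/gc_free declarations
#include "runtime/types.h" // Box

namespace pyston {

typedef std::list<Box*, StlCompatAllocator<Box*>> ConservativeBoxList;

void exampleUse(Box* some_box) {
    ConservativeBoxList pending;
    // the list nodes are GCKind::CONSERVATIVE allocations, so a collection that
    // scans them will also see (and keep alive) the Box* they hold
    pending.push_back(some_box);
}
}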
@@ -426,10 +426,6 @@ extern "C" PyObject* PyObject_Call(PyObject* callable_object, PyObject* args, Py
}
}
-extern "C" void PyObject_ClearWeakRefs(PyObject* object) noexcept {
-    Py_FatalError("unimplemented");
-}
extern "C" int PyObject_GetBuffer(PyObject* obj, Py_buffer* view, int flags) noexcept {
    if (!PyObject_CheckBuffer(obj)) {
        printf("%s\n", obj->cls->tp_name);
......
@@ -301,6 +301,7 @@ BoxedClass::BoxedClass(BoxedClass* base, gcvisit_func gc_visit, int attrs_offset
tp_flags |= Py_TPFLAGS_BASETYPE;
tp_flags |= Py_TPFLAGS_HAVE_CLASS;
tp_flags |= Py_TPFLAGS_HAVE_GC;
tp_flags |= Py_TPFLAGS_HAVE_WEAKREFS;
tp_base = base;
......
@@ -66,6 +66,7 @@ extern "C" void init_codecs();
extern "C" void init_socket();
extern "C" void _PyUnicode_Init();
extern "C" void initunicodedata();
extern "C" void init_weakref();
namespace pyston {
@@ -78,7 +79,7 @@ bool IN_SHUTDOWN = false;
#define SLICE_STEP_OFFSET ((char*)&(((BoxedSlice*)0x01)->step) - (char*)0x1)
// Analogue of PyType_GenericAlloc (default tp_alloc), but should only be used for Pyston classes!
-PyObject* PystonType_GenericAlloc(BoxedClass* cls, Py_ssize_t nitems) noexcept {
+extern "C" PyObject* PystonType_GenericAlloc(BoxedClass* cls, Py_ssize_t nitems) noexcept {
    assert(cls);
    RELEASE_ASSERT(nitems == 0, "");
    RELEASE_ASSERT(cls->tp_itemsize == 0, "");
@@ -244,7 +245,8 @@ Box* Box::nextIC() {
std::string builtinStr("__builtin__");
extern "C" BoxedFunctionBase::BoxedFunctionBase(CLFunction* f)
-    : f(f), closure(NULL), isGenerator(false), ndefaults(0), defaults(NULL), modname(NULL), name(NULL) {
+    : in_weakreflist(NULL), f(f), closure(NULL), isGenerator(false), ndefaults(0), defaults(NULL), modname(NULL),
+      name(NULL) {
    if (f->source) {
        this->modname = f->source->parent_module->getattr("__name__", NULL);
    } else {
@@ -258,7 +260,8 @@ extern "C" BoxedFunctionBase::BoxedFunctionBase(CLFunction* f)
extern "C" BoxedFunctionBase::BoxedFunctionBase(CLFunction* f, std::initializer_list<Box*> defaults,
                                                BoxedClosure* closure, bool isGenerator)
-    : f(f), closure(closure), isGenerator(isGenerator), ndefaults(0), defaults(NULL), modname(NULL), name(NULL) {
+    : in_weakreflist(NULL), f(f), closure(closure), isGenerator(isGenerator), ndefaults(0), defaults(NULL),
+      modname(NULL), name(NULL) {
    if (defaults.size()) {
        // make sure to initialize defaults first, since the GC behavior is triggered by ndefaults,
        // and a GC can happen within this constructor:
@@ -485,6 +488,23 @@ extern "C" void sliceGCHandler(GCVisitor* v, Box* b) {
    v->visit(sl->step);
}
static int call_gc_visit(PyObject* val, void* arg) {
if (val) {
GCVisitor* v = static_cast<GCVisitor*>(arg);
v->visit(val);
}
return 0;
}
static void proxy_to_tp_traverse(GCVisitor* v, Box* b) {
boxGCHandler(v, b);
b->cls->tp_traverse(b, call_gc_visit, v);
}
static void proxy_to_tp_clear(Box* b) {
b->cls->tp_clear(b);
}
// This probably belongs in tuple.cpp?
extern "C" void tupleGCHandler(GCVisitor* v, Box* b) {
    boxGCHandler(v, b);
@@ -1257,13 +1277,18 @@ void setupRuntime() {
float_cls = new BoxedHeapClass(object_cls, NULL, 0, sizeof(BoxedFloat), false, "float");
function_cls = new BoxedHeapClass(object_cls, &functionGCHandler, offsetof(BoxedFunction, attrs),
                                  sizeof(BoxedFunction), false, "function");
function_cls->tp_weaklistoffset = offsetof(BoxedFunction, in_weakreflist);
builtin_function_or_method_cls
    = new BoxedHeapClass(object_cls, &functionGCHandler, offsetof(BoxedBuiltinFunctionOrMethod, attrs),
                         sizeof(BoxedBuiltinFunctionOrMethod), false, "builtin_function_or_method");
builtin_function_or_method_cls->tp_weaklistoffset = offsetof(BoxedBuiltinFunctionOrMethod, in_weakreflist);
function_cls->simple_destructor = builtin_function_or_method_cls->simple_destructor = functionDtor;
instancemethod_cls = new BoxedHeapClass(object_cls, &instancemethodGCHandler, 0, sizeof(BoxedInstanceMethod), false,
                                        "instancemethod");
instancemethod_cls->tp_weaklistoffset = offsetof(BoxedInstanceMethod, in_weakreflist);
list_cls = new BoxedHeapClass(object_cls, &listGCHandler, 0, sizeof(BoxedList), false, "list");
slice_cls = new BoxedHeapClass(object_cls, &sliceGCHandler, 0, sizeof(BoxedSlice), false, "slice");
dict_cls = new BoxedHeapClass(object_cls, &dictGCHandler, 0, sizeof(BoxedDict), false, "dict");
@@ -1447,6 +1472,26 @@ void setupRuntime() {
init_codecs();
init_socket();
initunicodedata();
init_weakref();
// some additional setup to ensure weakrefs participate in our GC
BoxedClass* weakref_ref_cls = &_PyWeakref_RefType;
weakref_ref_cls->tp_alloc = PystonType_GenericAlloc;
weakref_ref_cls->gc_visit = proxy_to_tp_traverse;
weakref_ref_cls->simple_destructor = proxy_to_tp_clear;
weakref_ref_cls->is_pyston_class = true;
BoxedClass* weakref_proxy_cls = &_PyWeakref_ProxyType;
weakref_proxy_cls->tp_alloc = PystonType_GenericAlloc;
weakref_proxy_cls->gc_visit = proxy_to_tp_traverse;
weakref_proxy_cls->simple_destructor = proxy_to_tp_clear;
weakref_proxy_cls->is_pyston_class = true;
BoxedClass* weakref_callableproxy = &_PyWeakref_CallableProxyType;
weakref_callableproxy->tp_alloc = PystonType_GenericAlloc;
weakref_callableproxy->gc_visit = proxy_to_tp_traverse;
weakref_callableproxy->simple_destructor = proxy_to_tp_clear;
weakref_callableproxy->is_pyston_class = true;
setupSysEnd();
......
@@ -134,46 +134,6 @@ Box* objectStr(Box*);
Box* objectRepr(Box*);
template <class T> class StlCompatAllocator {
public:
typedef size_t size_type;
typedef T value_type;
typedef T* pointer;
typedef const T* const_pointer;
typedef T& reference;
typedef const T& const_reference;
typedef std::ptrdiff_t difference_type;
StlCompatAllocator() {}
template <class U> StlCompatAllocator(const StlCompatAllocator<U>& other) {}
template <class U> struct rebind { typedef StlCompatAllocator<U> other; };
pointer allocate(size_t n) {
size_t to_allocate = n * sizeof(value_type);
// assert(to_allocate < (1<<16));
return reinterpret_cast<pointer>(gc_alloc(to_allocate, gc::GCKind::CONSERVATIVE));
}
void deallocate(pointer p, size_t n) { gc::gc_free(p); }
// I would never be able to come up with this on my own:
// http://en.cppreference.com/w/cpp/memory/allocator/construct
template <class U, class... Args> void construct(U* p, Args&&... args) {
::new ((void*)p) U(std::forward<Args>(args)...);
}
template <class U> void destroy(U* p) { p->~U(); }
bool operator==(const StlCompatAllocator<T>& rhs) const { return true; }
bool operator!=(const StlCompatAllocator<T>& rhs) const { return false; }
};
template <typename K, typename V, typename Hash = std::hash<K>, typename KeyEqual = std::equal_to<K>>
class conservative_unordered_map
: public std::unordered_map<K, V, Hash, KeyEqual, StlCompatAllocator<std::pair<const K, V>>> {};
class BoxedClass : public BoxVar {
public:
    typedef void (*gcvisit_func)(GCVisitor*, Box*);
@@ -363,10 +323,13 @@ class BoxedUnicode : public Box {
class BoxedInstanceMethod : public Box {
public:
    Box** in_weakreflist;
    // obj is NULL for unbound instancemethod
    Box* obj, *func;
-    BoxedInstanceMethod(Box* obj, Box* func) __attribute__((visibility("default"))) : obj(obj), func(func) {}
+    BoxedInstanceMethod(Box* obj, Box* func) __attribute__((visibility("default")))
+        : in_weakreflist(NULL), obj(obj), func(func) {}
    DEFAULT_CLASS_SIMPLE(instancemethod_cls);
};
@@ -448,6 +411,8 @@ static_assert(sizeof(BoxedDict) == sizeof(PyDictObject), "");
class BoxedFunctionBase : public Box {
public:
    Box** in_weakreflist;
    HCAttrs attrs;
    CLFunction* f;
    BoxedClosure* closure;
@@ -639,6 +604,6 @@ Box* makeAttrWrapper(Box* b);
#define OverflowError ((BoxedClass*)PyExc_OverflowError)
// Our default for tp_alloc:
-PyObject* PystonType_GenericAlloc(BoxedClass* cls, Py_ssize_t nitems) noexcept;
+extern "C" PyObject* PystonType_GenericAlloc(BoxedClass* cls, Py_ssize_t nitems) noexcept;
}
#endif
import weakref
import gc

def cb(wr):
    print "object was destroyed", wr()

def doStuff():
    def meth():
        pass
    wr = weakref.ref(meth, cb)
    return wr

w = doStuff()
gc.collect()
import weakref

def test_wr(o):
    try:
        r = weakref.ref(o)
        print "passed", type(o)
        return r
    except:
        print "failed", type(o)

def test():
    1

wr = test_wr(test)
print wr() == test
#print weakref.getweakrefs(test)[0] == wr
print weakref.getweakrefcount(test)