Commit 930a382e authored by Kevin Modzelewski

Change our GC handling of weakrefs

For weakly-referenced objects that become garbage, we end up freeing their
attributes within that same collection (assuming the attributes are garbage as
well). This leaves those objects in a state that is not quite dead (we don't
want to allocate anything else in that space and clobber their weakreflist),
but not quite alive either (their attributes point to garbage memory).

So, change the handling to finish looking at those objects within the same
collection in which they become garbage, and then free them so they are
properly dead.

Weakref handling is unavoidably reentrant, but now we only have to worry
about fully-dead objects (the referents) or fully-alive objects (the weakrefs).
parent fdb0edde
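To make the new ordering concrete, here is a minimal, self-contained sketch of the two-pass scheme described above. It is not Pyston code: Referent, WeakRef, and handleWeakrefs are hypothetical stand-ins for Box, PyWeakReference, and the weakref-processing part of runCollection, and plain new/delete stands in for global_heap. The real collector additionally keeps the pending list in GC-visible storage (the StlCompatAllocator-backed std::list) so that a collection triggered from a callback cannot reclaim the queued weakrefs; the sketch glosses over that.

// Hypothetical sketch (not Pyston code): two-pass weakref handling.
#include <cstdio>
#include <functional>
#include <list>
#include <vector>

struct Referent;

struct WeakRef {
    Referent* target = nullptr;             // cleared during pass 1
    std::function<void(WeakRef*)> callback; // user code: may allocate or trigger another GC
};

struct Referent {
    std::vector<WeakRef*> weakref_list;     // stand-in for the object's weakreflist
};

static void handleWeakrefs(std::vector<Referent*>& weakly_referenced) {
    // Pass 1 (non-reentrant): clear every weakref to a dead referent, queue any
    // callbacks, then free the referent so it is properly dead.  No user code
    // runs here, so a nested collection can never observe half-dead objects.
    std::list<WeakRef*> pending;
    for (Referent* obj : weakly_referenced) {
        for (WeakRef* wr : obj->weakref_list) {
            wr->target = nullptr;
            if (wr->callback)
                pending.push_back(wr);
        }
        delete obj; // stand-in for global_heap.free(GCAllocation::fromUserData(o))
    }

    // Pass 2 (reentrancy is fine): everything we touch is fully alive and every
    // referent is fully dead, so the callbacks may allocate or even collect.
    while (!pending.empty()) {
        WeakRef* wr = pending.front();
        pending.pop_front();
        auto cb = std::move(wr->callback);
        wr->callback = nullptr;
        cb(wr);
    }
}

int main() {
    Referent* doomed = new Referent;
    WeakRef wr;
    wr.target = doomed;
    wr.callback = [](WeakRef*) { std::printf("callback ran after the referent was freed\n"); };
    doomed->weakref_list.push_back(&wr);

    std::vector<Referent*> weakly_referenced{ doomed };
    handleWeakrefs(weakly_referenced);
    return 0;
}

The point of the split is the one the message describes: pass 1 does all of its work inside the GC's non-reentrant section and calls no user code, and pass 2 runs the callbacks only once every object involved is either fully alive or has already been freed.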
@@ -206,6 +206,7 @@ void ASTInterpreter::setFrameInfo(const FrameInfo* frame_info) {
}
void ASTInterpreter::setGlobals(Box* globals) {
assert(gc::isValidGCObject(globals));
this->globals = globals;
}
@@ -323,7 +323,7 @@ void markPhase() {
#endif
}
static void sweepPhase(std::list<Box*, StlCompatAllocator<Box*>>& weakly_referenced) {
static void sweepPhase(std::vector<Box*>& weakly_referenced) {
// we need to use the allocator here because these objects are referenced only here, and calling the weakref
// callbacks could start another gc
global_heap.freeUnmarked(weakly_referenced);
@@ -341,6 +341,7 @@ void disableGC() {
}
static int ncollections = 0;
static bool should_not_reenter_gc = false;
void runCollection() {
static StatCounter sc("gc_collections");
sc.log();
@@ -352,26 +353,63 @@ void runCollection() {
if (VERBOSITY("gc") >= 2)
printf("Collection #%d\n", ncollections);
// The bulk of the GC work is not reentrant-safe.
// In theory we should never try to reenter that section, but it's happened due to bugs,
// which show up as very-hard-to-understand gc issues.
// So keep track if we're in the non-reentrant section and abort if we try to go back in.
// We could also just skip the collection if we're currently in the gc, but I think if we
// run into this case it's way more likely that it's a bug than something we should ignore.
RELEASE_ASSERT(!should_not_reenter_gc, "");
should_not_reenter_gc = true; // begin non-reentrant section
Timer _t("collecting", /*min_usec=*/10000);
markPhase();
std::list<Box*, StlCompatAllocator<Box*>> weakly_referenced;
// The sweep phase will not free weakly-referenced objects, so that we can inspect their
// weakrefs_list. We want to defer looking at those lists until the end of the sweep phase,
// since the deallocation of other objects (namely, the weakref objects themselves) can affect
// those lists, and we want to see the final versions.
std::vector<Box*> weakly_referenced;
sweepPhase(weakly_referenced);
// Handle weakrefs in two passes:
// - first, find all of the weakref objects whose callbacks we need to call. we need to iterate
// over the garbage-and-corrupt-but-still-alive weakly_referenced list in order to find these objects,
// so the gc is not reentrant during this section. after this we discard that list.
// - then, call all the weakref callbacks we collected from the first pass.
// Use a StlCompatAllocator to keep the pending weakref objects alive in case we trigger a new collection.
// In theory we could push so much onto this list that we would cause a new collection to start:
std::list<PyWeakReference*, StlCompatAllocator<PyWeakReference*>> weak_references;
for (auto o : weakly_referenced) {
assert(isValidGCObject(o));
PyWeakReference** list = (PyWeakReference**)PyObject_GET_WEAKREFS_LISTPTR(o);
while (PyWeakReference* head = *list) {
assert(isValidGCObject(head));
if (head->wr_object != Py_None) {
assert(head->wr_object == o);
_PyWeakref_ClearRef(head);
if (head->wr_callback) {
runtimeCall(head->wr_callback, ArgPassSpec(1), reinterpret_cast<Box*>(head), NULL, NULL, NULL,
NULL);
head->wr_callback = NULL;
}
if (head->wr_callback)
weak_references.push_back(head);
}
}
global_heap.free(GCAllocation::fromUserData(o));
}
should_not_reenter_gc = false; // end non-reentrant section
while (!weak_references.empty()) {
PyWeakReference* head = weak_references.front();
weak_references.pop_front();
if (head->wr_callback) {
runtimeCall(head->wr_callback, ArgPassSpec(1), reinterpret_cast<Box*>(head), NULL, NULL, NULL, NULL);
head->wr_callback = NULL;
}
}
if (VERBOSITY("gc") >= 2)
@@ -43,7 +43,7 @@ template <> void return_temporary_buffer<pyston::Box*>(pyston::Box** p) {
namespace pyston {
namespace gc {
bool _doFree(GCAllocation* al, std::list<Box*, StlCompatAllocator<Box*>>* weakly_referenced);
bool _doFree(GCAllocation* al, std::vector<Box*>* weakly_referenced);
// lots of linked lists around here, so let's just use template functions for operations on them.
template <class ListT> inline void nullNextPrev(ListT* node) {
@@ -86,7 +86,7 @@ template <class ListT, typename Func> inline void forEach(ListT* list, Func func
}
template <class ListT, typename Free>
inline void sweepList(ListT* head, std::list<Box*, StlCompatAllocator<Box*>>& weakly_referenced, Free free_func) {
inline void sweepList(ListT* head, std::vector<Box*>& weakly_referenced, Free free_func) {
auto cur = head;
while (cur) {
GCAllocation* al = cur->data;
@@ -137,7 +137,7 @@ void registerGCManagedBytes(size_t bytes) {
Heap global_heap;
bool _doFree(GCAllocation* al, std::list<Box*, StlCompatAllocator<Box*>>* weakly_referenced) {
bool _doFree(GCAllocation* al, std::vector<Box*>* weakly_referenced) {
if (VERBOSITY() >= 4)
printf("Freeing %p\n", al->user_data);
@@ -371,7 +371,7 @@ GCAllocation* SmallArena::allocationFrom(void* ptr) {
return reinterpret_cast<GCAllocation*>(&b->atoms[atom_idx]);
}
void SmallArena::freeUnmarked(std::list<Box*, StlCompatAllocator<Box*>>& weakly_referenced) {
void SmallArena::freeUnmarked(std::vector<Box*>& weakly_referenced) {
thread_caches.forEachValue([this, &weakly_referenced](ThreadBlockCache* cache) {
for (int bidx = 0; bidx < NUM_BUCKETS; bidx++) {
Block* h = cache->cache_free_heads[bidx];
@@ -431,7 +431,7 @@ void SmallArena::getStatistics(HeapStatistics* stats) {
}
SmallArena::Block** SmallArena::_freeChain(Block** head, std::list<Box*, StlCompatAllocator<Box*>>& weakly_referenced) {
SmallArena::Block** SmallArena::_freeChain(Block** head, std::vector<Box*>& weakly_referenced) {
while (Block* b = *head) {
int num_objects = b->numObjects();
int first_obj = b->minObjIndex();
@@ -449,8 +449,10 @@ SmallArena::Block** SmallArena::_freeChain(Block** head, std::list<Box*, StlComp
if (isMarked(al)) {
clearMark(al);
} else {
if (_doFree(al, &weakly_referenced))
if (_doFree(al, &weakly_referenced)) {
b->isfree.set(atom_idx);
// memset(al->user_data, 0, b->size - sizeof(GCAllocation));
}
}
}
@@ -656,7 +658,7 @@ GCAllocation* LargeArena::allocationFrom(void* ptr) {
return NULL;
}
void LargeArena::freeUnmarked(std::list<Box*, StlCompatAllocator<Box*>>& weakly_referenced) {
void LargeArena::freeUnmarked(std::vector<Box*>& weakly_referenced) {
sweepList(head, weakly_referenced, [this](LargeObj* ptr) { _freeLargeObj(ptr); });
}
@@ -848,7 +850,7 @@ GCAllocation* HugeArena::allocationFrom(void* ptr) {
return NULL;
}
void HugeArena::freeUnmarked(std::list<Box*, StlCompatAllocator<Box*>>& weakly_referenced) {
void HugeArena::freeUnmarked(std::vector<Box*>& weakly_referenced) {
sweepList(head, weakly_referenced, [this](HugeObj* ptr) { _freeHugeObj(ptr); });
}
@@ -196,7 +196,7 @@ public:
void free(GCAllocation* al);
GCAllocation* allocationFrom(void* ptr);
void freeUnmarked(std::list<Box*, StlCompatAllocator<Box*>>& weakly_referenced);
void freeUnmarked(std::vector<Box*>& weakly_referenced);
void getStatistics(HeapStatistics* stats);
@@ -328,7 +328,7 @@ private:
Block* _allocBlock(uint64_t size, Block** prev);
GCAllocation* _allocFromBlock(Block* b);
Block* _claimBlock(size_t rounded_size, Block** free_head);
Block** _freeChain(Block** head, std::list<Box*, StlCompatAllocator<Box*>>& weakly_referenced);
Block** _freeChain(Block** head, std::vector<Box*>& weakly_referenced);
void _getChainStatistics(HeapStatistics* stats, Block** head);
GCAllocation* __attribute__((__malloc__)) _alloc(size_t bytes, int bucket_idx);
@@ -401,7 +401,7 @@ public:
void free(GCAllocation* alloc);
GCAllocation* allocationFrom(void* ptr);
void freeUnmarked(std::list<Box*, StlCompatAllocator<Box*>>& weakly_referenced);
void freeUnmarked(std::vector<Box*>& weakly_referenced);
void getStatistics(HeapStatistics* stats);
};
@@ -419,7 +419,7 @@ public:
void free(GCAllocation* alloc);
GCAllocation* allocationFrom(void* ptr);
void freeUnmarked(std::list<Box*, StlCompatAllocator<Box*>>& weakly_referenced);
void freeUnmarked(std::vector<Box*>& weakly_referenced);
void getStatistics(HeapStatistics* stats);
@@ -530,7 +530,7 @@ public:
}
// not thread safe:
void freeUnmarked(std::list<Box*, StlCompatAllocator<Box*>>& weakly_referenced) {
void freeUnmarked(std::vector<Box*>& weakly_referenced) {
small_arena.freeUnmarked(weakly_referenced);
large_arena.freeUnmarked(weakly_referenced);
huge_arena.freeUnmarked(weakly_referenced);
@@ -4741,6 +4741,7 @@ extern "C" void delGlobal(Box* globals, const std::string* name) {
extern "C" Box* getGlobal(Box* globals, const std::string* name) {
STAT_TIMER(t0, "us_timer_slowpath_getglobal");
ASSERT(gc::isValidGCObject(globals), "%p", globals);
static StatCounter slowpath_getglobal("slowpath_getglobal");
slowpath_getglobal.log();