Commit de60220f authored by Kevin Modzelewski

Fix class-freeing issue

We need to free the metaclasses in the same collection as the classes that use them, rather than deferring them to the next one.
parent e4d91e36
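For context on the fix: when a class and its metaclass both die in the same collection, the class has to be released while its metaclass is still intact, and postponing the metaclass to the next collection is not an option because by then everything it refers to has already been freed. The sketch below is a standalone illustration of the layered approach the diff takes in runCollection; Cls, releaseMemory and freeClasses are made-up stand-ins (for BoxedClass, global_heap._setFree and the new loop), not Pyston APIs.

#include <cassert>
#include <cstdio>
#include <unordered_set>
#include <vector>

struct Cls {
    const char* name;
    Cls* cls; // the metaclass of this class
};

// Stand-in for handing the memory back to the heap.
void releaseMemory(Cls* c) {
    std::printf("freeing %s\n", c->name);
}

// Free classes before their metaclasses, all within a single collection:
// each round frees every class that is not the metaclass of a class that is
// still pending, then repeats on whatever had to be deferred.
void freeClasses(std::vector<Cls*> classes_to_free) {
    while (!classes_to_free.empty()) {
        std::unordered_set<Cls*> classes_to_not_free;
        for (Cls* c : classes_to_free)
            classes_to_not_free.insert(c->cls);

        std::vector<Cls*> deferred;
        for (Cls* c : classes_to_free) {
            if (classes_to_not_free.count(c))
                deferred.push_back(c); // still the metaclass of a pending class
            else
                releaseMemory(c);
        }
        // Mirrors the assert in the diff: every round must make progress.
        assert(deferred.size() < classes_to_free.size());
        classes_to_free = std::move(deferred);
    }
}

int main() {
    Cls meta{"Meta", nullptr};
    Cls klass{"UserClass", &meta};
    // Round 1 frees UserClass (Meta is still someone's metaclass);
    // round 2 frees Meta.
    freeClasses({&klass, &meta});
}

With the old one-level-per-collection approach, Meta would have survived into the next collection even though everything it points at was freed in this one.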
@@ -921,9 +921,10 @@ static void openTraceFp(bool is_pre) {
         fclose(trace_fp);
     char tracefn_buf[80];
-    snprintf(tracefn_buf, sizeof(tracefn_buf), "gc_trace_%d.%03d%s.txt", getpid(), ncollections + is_pre,
+    snprintf(tracefn_buf, sizeof(tracefn_buf), "gc_trace_%d.%04d%s.txt", getpid(), ncollections + is_pre,
              is_pre ? "_pre" : "");
     trace_fp = fopen(tracefn_buf, "w");
     assert(trace_fp);
 }
 static int _dummy() {
@@ -1002,17 +1003,29 @@ void runCollection() {
     }
     // We want to make sure that classes get freed before their metaclasses.
-    // Use a simple approach of only freeing one level of the hierarchy and then
-    // letting the next collection do the next one.
-    llvm::DenseSet<BoxedClass*> classes_to_not_free;
-    for (auto b : classes_to_free) {
-        classes_to_not_free.insert(b->cls);
-    }
-    for (auto b : classes_to_free) {
-        if (classes_to_not_free.count(b))
-            continue;
-        global_heap._setFree(GCAllocation::fromUserData(b));
-    }
+    // So, while there are still more classes to free, free any classes that are
+    // not the metaclass of another class we will free. Then repeat.
+    //
+    // Note: our earlier approach of just deferring metaclasses to the next collection is
+    // not quite safe, since we will have freed everything that the class refers to.
+    while (!classes_to_free.empty()) {
+        llvm::DenseSet<BoxedClass*> classes_to_not_free;
+        for (auto b : classes_to_free) {
+            classes_to_not_free.insert(b->cls);
+        }
+        std::vector<BoxedClass*> deferred_classes;
+        for (auto b : classes_to_free) {
+            GC_TRACE_LOG("Dealing with the postponed free of class %p\n", b);
+            if (classes_to_not_free.count(b)) {
+                deferred_classes.push_back(b);
+                continue;
+            }
+            global_heap._setFree(GCAllocation::fromUserData(b));
+        }
+        assert(deferred_classes.size() < classes_to_free.size());
+        std::swap(deferred_classes, classes_to_free);
+    }
     global_heap.cleanupAfterCollection();
......
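Worth noting about the new loop above: it has to shrink classes_to_free on every round, otherwise it would spin forever, and the assert(deferred_classes.size() < classes_to_free.size()) documents that expectation. Metaclass chains for collectible classes are expected to be acyclic (and the base type object, which is its own metaclass, should never end up in classes_to_free), so a round that defers everything would indicate a bug. A hypothetical input that would violate the assert, using the same toy Cls stand-in as the sketch near the top of this page rather than real Pyston objects:

#include <cstdio>
#include <unordered_set>
#include <vector>

struct Cls { Cls* cls; };

int main() {
    Cls a{nullptr}, b{nullptr};
    a.cls = &b;
    b.cls = &a; // metaclass cycle: neither class can ever be freed first

    std::vector<Cls*> classes_to_free{&a, &b};
    std::unordered_set<Cls*> classes_to_not_free;
    for (Cls* c : classes_to_free)
        classes_to_not_free.insert(c->cls);

    // Every pending class is the metaclass of another pending class, so the
    // round defers everything and makes no progress -- exactly the situation
    // the assert is there to catch instead of looping forever.
    for (Cls* c : classes_to_free)
        std::printf("defer %p\n", static_cast<void*>(c));
}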
......@@ -112,8 +112,6 @@ extern "C" inline void* gc_alloc(size_t bytes, GCKind kind_id) {
// if (VERBOSITY()) printf("Allocated %ld bytes at [%p, %p)\n", bytes, r, (char*)r + bytes);
#endif
GC_TRACE_LOG("Allocated %p\n", r);
#if STAT_ALLOCATIONS
gc_alloc_bytes.log(alloc_bytes);
gc_alloc_bytes_typed[(int)kind_id].log(alloc_bytes);
......
@@ -186,7 +186,7 @@ __attribute__((always_inline)) bool _doFree(GCAllocation* al, std::vector<Box*>* weakly_referenced,
     if (unlikely(isWeaklyReferenced(b))) {
         assert(weakly_referenced && "attempting to free a weakly referenced object manually");
         weakly_referenced->push_back(b);
-        GC_TRACE_LOG("%p is weakly referenced\n", al->user_data);
+        GC_TRACE_LOG("doFree: %p is weakly referenced\n", al->user_data);
         return false;
     }
@@ -196,11 +196,12 @@ __attribute__((always_inline)) bool _doFree(GCAllocation* al, std::vector<Box*>* weakly_referenced,
     if (unlikely(PyType_Check(b))) {
         assert(classes_to_free);
         classes_to_free->push_back(static_cast<BoxedClass*>(b));
+        GC_TRACE_LOG("doFree: %p is a class object\n", al->user_data);
         return false;
     }
     if (b->cls->tp_dealloc != dealloc_null && b->cls->has_safe_tp_dealloc) {
-        GC_TRACE_LOG("running safe destructor for %p\n", b);
+        GC_TRACE_LOG("doFree: running safe destructor for %p\n", b);
         gc_safe_destructors.log();
         GCAllocation* al = GCAllocation::fromUserData(b);
@@ -687,8 +688,10 @@ GCAllocation* SmallArena::_alloc(size_t rounded_size, int bucket_idx) {
     while (true) {
         while (Block* cache_block = *cache_head) {
            GCAllocation* rtn = _allocFromBlock(cache_block);
-            if (rtn)
+            if (rtn) {
+                GC_TRACE_LOG("Allocated %p\n", rtn->user_data);
                 return rtn;
+            }
             removeFromLLAndNull(cache_block);
             insertIntoLL(&cache->cache_full_heads[bucket_idx], cache_block);
......
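The remaining hunks are about making the per-collection GC trace files easier to read: the "Allocated %p" line is logged from SmallArena::_alloc with the address actually handed back, the free-path messages in _doFree gain a "doFree:" prefix, and the trace filename counter widens from %03d to %04d (presumably so filenames keep sorting once a process passes 1000 collections). A small sketch of the strings involved; the pid, collection number and addresses are placeholders, and TRACE is a stand-in macro, not the real GC_TRACE_LOG.

#include <cstdio>

// Stand-in for GC_TRACE_LOG, just to render the format strings from the diff.
#define TRACE(fp, ...) std::fprintf(fp, __VA_ARGS__)

int main() {
    // Filename format after the change; "_pre" marks the pre-collection dump.
    char name[80];
    std::snprintf(name, sizeof(name), "gc_trace_%d.%04d%s.txt",
                  /*pid=*/1234, /*ncollections=*/7, "_pre");
    std::printf("%s\n", name); // gc_trace_1234.0007_pre.txt

    void* p = name; // placeholder address for the example lines below
    // The kinds of lines the changed call sites emit:
    TRACE(stdout, "Allocated %p\n", p);                                 // SmallArena::_alloc
    TRACE(stdout, "doFree: %p is a class object\n", p);                 // _doFree, deferred class
    TRACE(stdout, "doFree: %p is weakly referenced\n", p);              // _doFree, weakref path
    TRACE(stdout, "Dealing with the postponed free of class %p\n", p);  // runCollection
}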