Commit e8fefe9f authored by Chris Toshok

build sorted vectors for large/huge objects at gc time, and use binarySearch to power ::allocationFrom
parent 367d8b22
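
The core trick, shown as a minimal standalone sketch before the diff itself: sort the live objects by address once per collection, then answer each "which object contains this pointer?" query with a three-way binary search over the disjoint [start, start + size) ranges. The names below (Range, buildLookup, findRange) are invented for illustration and are not Pyston's; the actual change uses ObjLookupCache plus Pyston's binarySearch helper, but the mechanics are the same.

#include <algorithm>
#include <cstddef>
#include <cstdio>
#include <vector>

struct Range {
    char* start; // first byte of an object's payload
    size_t size; // payload size in bytes
};

// Built once per collection: a single O(n log n) sort, amortized across every
// pointer query issued while marking.
void buildLookup(std::vector<Range>& lookup) {
    std::sort(lookup.begin(), lookup.end(),
              [](const Range& a, const Range& b) { return a.start < b.start; });
}

// Three-way binary search over the sorted, disjoint ranges: O(log n) per query,
// versus the O(n) linked-list walk it replaces.
Range* findRange(std::vector<Range>& lookup, void* p) {
    size_t lo = 0, hi = lookup.size();
    while (lo < hi) {
        size_t mid = lo + (hi - lo) / 2;
        Range& r = lookup[mid];
        if ((char*)p < r.start)
            hi = mid; // p is below this range: search the left half
        else if ((char*)p >= r.start + r.size)
            lo = mid + 1; // p is past this range: search the right half
        else
            return &r; // p falls inside [start, start + size)
    }
    return nullptr; // p is not inside any tracked object
}

int main() {
    static char a[64], b[64], outside;
    std::vector<Range> lookup = { { b, sizeof(b) }, { a, sizeof(a) } };
    buildLookup(lookup);
    std::printf("%d\n", findRange(lookup, a + 10) != nullptr);   // 1: inside a
    std::printf("%d\n", findRange(lookup, &outside) != nullptr); // 0: no hit
}

std::equal_range or std::partition_point could express the same search, but the explicit loop mirrors the three-way comparator contract that the binarySearch call in the diff relies on.
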
@@ -438,6 +438,8 @@ void runCollection() {
Timer _t("collecting", /*min_usec=*/10000);
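// prepareForCollection() builds each arena's sorted lookup vector up front, so
// every allocationFrom() query issued during marking can binary-search it; the
// matching cleanupAfterCollection() below drops the vectors once the sweep is done.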
+    global_heap.prepareForCollection();
markPhase();
// The sweep phase will not free weakly-referenced objects, so that we can inspect their
@@ -500,6 +502,8 @@ void runCollection() {
}
}
+    global_heap.cleanupAfterCollection();
if (VERBOSITY("gc") >= 2)
printf("Collection #%d done\n\n", ncollections);
@@ -661,7 +661,23 @@ void LargeArena::free(GCAllocation* al) {
_freeLargeObj(LargeObj::fromAllocation(al));
}
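// Three-way comparator for the binarySearch helper: negative when ptr sits below
// an object's payload, positive when it sits at or past the end, and 0 when ptr
// falls inside [data, data + size) -- i.e. inside that object.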
+struct CompareObjLookupCache {
+    int operator()(const void* p, const ObjLookupCache& obj) {
+        if (p < (char*)obj.data)
+            return -1;
+        if (p >= (char*)obj.data + obj.size)
+            return 1;
+        return 0;
+    }
+};
GCAllocation* LargeArena::allocationFrom(void* ptr) {
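// Two paths: during a collection the sorted cache is populated, so a binary
// search answers in O(log n); outside of collections the cache is empty and the
// original linear walk of the object list is used instead.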
+    if (lookup.size()) {
+        int idx = binarySearch(ptr, lookup.begin(), lookup.end(), CompareObjLookupCache());
+        if (idx < 0)
+            return NULL;
+        return (GCAllocation*)lookup[idx].data;
+    } else {
LargeObj* obj = NULL;
for (obj = head; obj; obj = obj->next) {
@@ -672,6 +688,19 @@ GCAllocation* LargeArena::allocationFrom(void* ptr) {
}
}
return NULL;
}
}
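// One O(n log n) sort per collection buys O(log n) lookups for the whole mark
// phase; cleanupAfterCollection() throws the cache away again afterwards.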
+void LargeArena::prepareForCollection() {
+    for (LargeObj* lo = head; lo; lo = lo->next) {
+        lookup.push_back(ObjLookupCache(lo, &lo->data[0], lo->size));
+    }
+    std::sort(lookup.begin(), lookup.end(),
+              [](const ObjLookupCache& lo1, const ObjLookupCache& lo2) { return lo1.data < lo2.data; });
+}
+
+void LargeArena::cleanupAfterCollection() {
+    lookup.clear();
+}
void LargeArena::freeUnmarked(std::vector<Box*>& weakly_referenced, std::vector<BoxedClass*>& classes_to_free) {
@@ -682,7 +711,6 @@ void LargeArena::getStatistics(HeapStatistics* stats) {
forEach(head, [stats](LargeObj* obj) { addStatistic(stats, obj->data, obj->size); });
}
void LargeArena::add_free_chunk(LargeFreeChunk* free_chunks, size_t size) {
size_t num_chunks = size >> CHUNK_BITS;
@@ -831,7 +859,7 @@ GCAllocation* HugeArena::alloc(size_t size) {
total_size = (total_size + PAGE_SIZE - 1) & ~(PAGE_SIZE - 1);
extendMapping(total_size);
HugeObj* rtn = (HugeObj*)allocFromArena(total_size);
-    rtn->obj_size = size;
+    rtn->size = size;
nullNextPrev(rtn);
insertIntoLL(&head, rtn);
@@ -847,7 +875,7 @@ GCAllocation* HugeArena::realloc(GCAllocation* al, size_t bytes) {
return al;
GCAllocation* rtn = heap->alloc(bytes);
-    memcpy(rtn, al, std::min(bytes, obj->obj_size));
+    memcpy(rtn, al, std::min(bytes, obj->size));
_freeHugeObj(obj);
return rtn;
@@ -858,13 +886,32 @@ void HugeArena::free(GCAllocation* al) {
}
GCAllocation* HugeArena::allocationFrom(void* ptr) {
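// Mirrors LargeArena::allocationFrom: binary-search the sorted cache while
// collecting, otherwise walk the linked list as before.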
+    if (lookup.size()) {
+        int idx = binarySearch(ptr, lookup.begin(), lookup.end(), CompareObjLookupCache());
+        if (idx < 0)
+            return NULL;
+        return (GCAllocation*)lookup[idx].data;
+    } else {
HugeObj* cur = head;
while (cur) {
-        if (ptr >= cur && ptr < &cur->data[cur->obj_size])
+        if (ptr >= cur && ptr < &cur->data[cur->size])
return &cur->data[0];
cur = cur->next;
}
return NULL;
}
}
+void HugeArena::prepareForCollection() {
+    for (HugeObj* lo = head; lo; lo = lo->next) {
+        lookup.push_back(ObjLookupCache(lo, &lo->data[0], lo->size));
+    }
+    std::sort(lookup.begin(), lookup.end(),
+              [](const ObjLookupCache& lo1, const ObjLookupCache& lo2) { return lo1.data < lo2.data; });
+}
+
+void HugeArena::cleanupAfterCollection() {
+    lookup.clear();
+}
void HugeArena::freeUnmarked(std::vector<Box*>& weakly_referenced, std::vector<BoxedClass*>& classes_to_free) {
@@ -227,6 +227,9 @@ public:
void getStatistics(HeapStatistics* stats);
+    void prepareForCollection() {}
+    void cleanupAfterCollection() {}
private:
template <int N> class Bitmap {
static_assert(N % 64 == 0, "");
@@ -361,6 +364,15 @@ private:
GCAllocation* __attribute__((__malloc__)) _alloc(size_t bytes, int bucket_idx);
};
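// One cache entry per large/huge object: objptr is the arena's header object,
// data points at its GCAllocation payload, and size bounds the searchable
// [data, data + size) range.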
+struct ObjLookupCache {
+    void* objptr;
+    void* data;
+    size_t size;
+
+    ObjLookupCache(void* objptr, void* data, size_t size) : objptr(objptr), data(data), size(size) {}
+};
//
// The LargeArena allocates objects where 3584 < size < 1024*1024-CHUNK_SIZE-sizeof(LargeObject) bytes.
//
@@ -407,6 +419,7 @@ private:
static constexpr int NUM_FREE_LISTS = 32;
+    std::vector<ObjLookupCache> lookup; // used during gc's to speed up finding large object GCAllocations
Heap* heap;
LargeObj* head;
LargeBlock* blocks;
@@ -431,6 +444,9 @@ public:
void freeUnmarked(std::vector<Box*>& weakly_referenced, std::vector<BoxedClass*>& classes_to_free);
void getStatistics(HeapStatistics* stats);
+    void prepareForCollection();
+    void cleanupAfterCollection();
};
// The HugeArena allocates objects where size > 1024*1024 bytes.
@@ -450,14 +466,17 @@ public:
void getStatistics(HeapStatistics* stats);
+    void prepareForCollection();
+    void cleanupAfterCollection();
private:
struct HugeObj {
HugeObj* next, **prev;
-        size_t obj_size;
+        size_t size;
GCAllocation data[0];
int mmap_size() {
-            size_t total_size = obj_size + sizeof(HugeObj);
+            size_t total_size = size + sizeof(HugeObj);
total_size = (total_size + PAGE_SIZE - 1) & ~(PAGE_SIZE - 1);
return total_size;
}
@@ -474,6 +493,7 @@ private:
void _freeHugeObj(HugeObj* lobj);
HugeObj* head;
+    std::vector<ObjLookupCache> lookup; // used during gc's to speed up finding large object GCAllocations
Heap* heap;
};
@@ -552,6 +572,18 @@ public:
huge_arena.freeUnmarked(weakly_referenced, classes_to_free);
}
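// The Heap-level hooks fan out to all three arenas; SmallArena's versions are
// inline no-ops (see above) since only the large and huge arenas keep caches.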
+    void prepareForCollection() {
+        small_arena.prepareForCollection();
+        large_arena.prepareForCollection();
+        huge_arena.prepareForCollection();
+    }
+
+    void cleanupAfterCollection() {
+        small_arena.cleanupAfterCollection();
+        large_arena.cleanupAfterCollection();
+        huge_arena.cleanupAfterCollection();
+    }
void dumpHeapStatistics(int level);
private: