Commit 7cd29d79 authored by Chris Toshok

mucho cleanup

parent 1a268096
@@ -65,17 +65,20 @@ inline void clearMark(GCAllocation* header) {
 #define PAGE_SIZE 4096
 
-template <uintptr_t start> class Arena {
+template <uintptr_t arena_start, uintptr_t arena_size> class Arena {
 private:
     void* cur;
+    void* end;
 
 protected:
-    Arena() : cur((void*)start) {}
+    Arena() : cur((void*)arena_start), end((void*)(arena_start + arena_size)) {}
 
 public:
     void* doMmap(size_t size) {
         assert(size % PAGE_SIZE == 0);
+        assert(((uint8_t*)cur + size) < end && "arena full");
         void* mrtn = mmap(cur, size, PROT_READ | PROT_WRITE, MAP_FIXED | MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
         assert((uintptr_t)mrtn != -1 && "failed to allocate memory from OS");
         ASSERT(mrtn == cur, "%p %p\n", mrtn, cur);
@@ -83,9 +86,10 @@ public:
         return mrtn;
     }
 
-    bool contains(void* addr) { return (void*)start <= addr && addr < cur; }
+    bool contains(void* addr) { return (void*)arena_start <= addr && addr < cur; }
 };
 
+constexpr uintptr_t ARENA_SIZE = 0x1000000000L;
 constexpr uintptr_t SMALL_ARENA_START = 0x1270000000L;
 constexpr uintptr_t LARGE_ARENA_START = 0x2270000000L;
 constexpr uintptr_t HUGE_ARENA_START = 0x3270000000L;
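A minimal sketch of the address-space layout these constants define, assuming the three arenas must stay disjoint for contains() to be unambiguous (the static_asserts below are illustrative, not from the source):

    #include <cstdint>

    // Constants copied from the hunk above: each arena spans ARENA_SIZE
    // (64 GiB) starting at a fixed base, so ownership of any pointer can
    // be decided by a simple range check.
    constexpr uintptr_t ARENA_SIZE = 0x1000000000L;
    constexpr uintptr_t SMALL_ARENA_START = 0x1270000000L;
    constexpr uintptr_t LARGE_ARENA_START = 0x2270000000L;
    constexpr uintptr_t HUGE_ARENA_START = 0x3270000000L;

    // If the regions overlapped, a pointer could satisfy two arenas'
    // contains() checks at once; these asserts document the invariant.
    static_assert(SMALL_ARENA_START + ARENA_SIZE <= LARGE_ARENA_START, "arenas overlap");
    static_assert(LARGE_ARENA_START + ARENA_SIZE <= HUGE_ARENA_START, "arenas overlap");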
@@ -94,8 +98,8 @@ constexpr uintptr_t HUGE_ARENA_START = 0x3270000000L;
 //
 // The SmallArena allocates objects <= 3584 bytes.
 //
-// it uses segregated-fit allocation, and each block contains free
-// bitmap for objects of a given size (assigned to the block)
+// it uses segregated-fit allocation, and each block contains a free
+// bitmap for objects of a given size (constant for the block)
 //
 static const size_t sizes[] = {
     16, 32, 48, 64, 80, 96, 112, 128, 160, 192, 224, 256, 320, 384,
@@ -103,8 +107,20 @@ static const size_t sizes[] = {
 };
 static constexpr size_t NUM_BUCKETS = sizeof(sizes) / sizeof(sizes[0]);
 
-class SmallArena : public Arena<SMALL_ARENA_START> {
+class SmallArena : public Arena<SMALL_ARENA_START, ARENA_SIZE> {
 public:
+    SmallArena(Heap* heap) : Arena(), heap(heap), thread_caches(heap, this) {}
+
+    GCAllocation* __attribute__((__malloc__)) alloc(size_t bytes);
+    GCAllocation* realloc(GCAllocation* alloc, size_t bytes);
+    void free(GCAllocation* al);
+
+    GCAllocation* allocationFrom(void* ptr);
+    void freeUnmarked();
+
+    void getStatistics(HeapStatistics* stats);
+
 private:
     template <int N> class Bitmap {
         static_assert(N % 64 == 0, "");
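The Bitmap<N> template above backs the "free bitmap" the comment describes: one bit per object slot, packed into 64-bit words (hence the N % 64 static_assert). A self-contained sketch of how such a bitmap typically works; BitmapSketch and firstFree are illustrative names, not the header's actual API:

    #include <cstdint>

    template <int N> class BitmapSketch {
        static_assert(N % 64 == 0, "");
        uint64_t data[N / 64] = {}; // all slots start free (bit == 0)

    public:
        bool isSet(int idx) { return (data[idx / 64] >> (idx % 64)) & 1; }
        void set(int idx) { data[idx / 64] |= 1ull << (idx % 64); }
        void clear(int idx) { data[idx / 64] &= ~(1ull << (idx % 64)); }

        // Scan a word at a time for a zero bit (a free slot);
        // returns -1 if every slot in the block is taken.
        int firstFree() {
            for (int w = 0; w < N / 64; w++) {
                if (~data[w])
                    return w * 64 + __builtin_ctzll(~data[w]);
            }
            return -1;
        }
    };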
@@ -205,8 +221,7 @@ private:
     static_assert(offsetof(Block, _header_end) >= BLOCK_HEADER_SIZE, "bad header size");
     static_assert(offsetof(Block, _header_end) <= BLOCK_HEADER_SIZE, "bad header size");
 
-    // forward (public) definition of ThreadBlockCache so we can reference it both in this class (privately) and in Heap
-    // (for a friend ref).
     struct ThreadBlockCache {
         Heap* heap;
         SmallArena* small;
@@ -221,7 +236,6 @@ private:
     };
 
     Block* heads[NUM_BUCKETS];
     Block* full_heads[NUM_BUCKETS];
@@ -231,71 +245,46 @@ private:
     // TODO only use thread caches if we're in GRWL mode?
     threading::PerThreadSet<ThreadBlockCache, Heap*, SmallArena*> thread_caches;
 
-    Block* alloc_block(uint64_t size, Block** prev);
-    GCAllocation* allocFromBlock(Block* b);
-    Block* claimBlock(size_t rounded_size, Block** free_head);
-    void insertIntoLL(Block** next_pointer, Block* next);
-    void removeFromLL(Block* b);
-    Block** freeChain(Block** head);
-    void getChainStatistics(HeapStatistics* stats, Block** head);
+    Block* _allocBlock(uint64_t size, Block** prev);
+    GCAllocation* _allocFromBlock(Block* b);
+    Block* _claimBlock(size_t rounded_size, Block** free_head);
+    Block** _freeChain(Block** head);
+    void _getChainStatistics(HeapStatistics* stats, Block** head);
 
     GCAllocation* __attribute__((__malloc__)) _alloc(size_t bytes, int bucket_idx);
+    void _free(GCAllocation* al, Block* b);
 
-public:
-    SmallArena(Heap* heap) : Arena(), heap(heap), thread_caches(heap, this) {}
-
-    GCAllocation* __attribute__((__malloc__)) alloc(size_t bytes) {
-        if (bytes <= 16)
-            return _alloc(16, 0);
-        else if (bytes <= 32)
-            return _alloc(32, 1);
-        else {
-            for (int i = 2; i < NUM_BUCKETS; i++) {
-                if (sizes[i] >= bytes) {
-                    return _alloc(sizes[i], i);
-                }
-            }
-            return NULL;
-        }
-    }
-
-    GCAllocation* realloc(GCAllocation* alloc, size_t bytes);
-
-    void free(GCAllocation* al) {
-        Block* b = Block::forPointer(al);
-        _free(al, b);
-    }
-
-    void getStatistics(HeapStatistics* stats);
-
-    GCAllocation* allocationFrom(void* ptr);
-    void freeUnmarked();
 };
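The removed inline free() resolved its Block with Block::forPointer, i.e. by pointer masking. A sketch of that technique, assuming blocks are mmap'ed at BLOCK_SIZE-aligned addresses (the BLOCK_SIZE value here is an assumption; the real constant is defined elsewhere in heap.h):

    #include <cstdint>

    struct Block; // per-block header: size class, free bitmap, chain links

    static constexpr uintptr_t BLOCK_SIZE = 16384; // assumed; see heap.h

    // Any pointer into a block shares the block's high address bits, so
    // masking off the low bits recovers the header at the block start.
    inline Block* blockForPointer(void* ptr) {
        return reinterpret_cast<Block*>(reinterpret_cast<uintptr_t>(ptr) & ~(BLOCK_SIZE - 1));
    }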
 //
-// The LargeArena allocates objects where 3584 < size <1024*1024 bytes.
+// The LargeArena allocates objects where 3584 < size <1024*1024-CHUNK_SIZE-sizeof(LargeObject) bytes.
 //
 // it maintains a set of size-segregated free lists, and a special
 // free list for larger objects. If the free list specific to a given
 // size has no entries, we search the large free list.
 //
-class LargeArena : public Arena<LARGE_ARENA_START> {
-    struct LargeFreeChunk {
-        LargeFreeChunk* next_size;
-        size_t size;
-    };
+// Blocks of 1meg are mmap'ed individually, and carved up as needed.
+//
+class LargeArena : public Arena<LARGE_ARENA_START, ARENA_SIZE> {
+private:
     struct LargeBlock {
         LargeBlock* next;
         size_t num_free_chunks;
         unsigned char* free_chunk_map;
     };
 
+    struct LargeFreeChunk {
+        LargeFreeChunk* next_size;
+        size_t size;
+    };
+
     struct LargeObj {
         LargeObj* next, **prev;
         size_t size;
         GCAllocation data[0];
+
+        static LargeObj* fromAllocation(GCAllocation* alloc) {
+            char* rtn = (char*)alloc - offsetof(LargeObj, data);
+            return reinterpret_cast<LargeObj*>(rtn);
+        }
     };
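The new fromAllocation helper is the classic container_of idiom: data sits at a fixed offset inside LargeObj, so subtracting offsetof from a payload pointer recovers the header in front of it. A standalone sketch of the same trick (Header is an illustrative stand-in for LargeObj):

    #include <cstddef>

    struct Header {
        Header* next;
        size_t size;
        char data[0]; // payload starts here (GNU flexible-array style)
    };

    // Recover the enclosing Header from a pointer to its payload.
    inline Header* headerFromPayload(void* payload) {
        return reinterpret_cast<Header*>(static_cast<char*>(payload) - offsetof(Header, data));
    }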
     /*
@@ -311,20 +300,18 @@ class LargeArena : public Arena<LARGE_ARENA_START> {
     static constexpr int NUM_FREE_LISTS = 32;
 
-    void add_free_chunk(LargeFreeChunk* free_chunks, size_t size);
-    LargeFreeChunk* get_from_size_list(LargeFreeChunk** list, size_t size);
-    LargeObj* _allocInternal(size_t size);
-    void _freeInternal(LargeObj* obj, size_t size);
-    void _free(LargeObj* obj);
+    Heap* heap;
 
     LargeObj* head;
     LargeBlock* blocks;
     LargeFreeChunk* free_lists[NUM_FREE_LISTS]; /* 0 is for larger sizes */
 
-    Heap* heap;
+    void add_free_chunk(LargeFreeChunk* free_chunks, size_t size);
+    LargeFreeChunk* get_from_size_list(LargeFreeChunk** list, size_t size);
+    LargeObj* _alloc(size_t size);
+    void _freeLargeObj(LargeObj* obj);
 
 public:
-    LargeArena(Heap* heap) : head(NULL), blocks(NULL), heap(heap) {}
+    LargeArena(Heap* heap) : heap(heap), head(NULL), blocks(NULL) {}
 
     /* Largest object that can be allocated in a large block. */
     static constexpr size_t ALLOC_SIZE_LIMIT = BLOCK_SIZE - CHUNK_SIZE - sizeof(LargeObj);
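Per the comment on free_lists, chunks are binned by size with list 0 reserved for anything larger. A hedged sketch of one plausible index computation; the CHUNK_SIZE value and the exact mapping are assumptions, only NUM_FREE_LISTS and the "0 is for larger sizes" rule come from the header:

    #include <cstddef>

    static constexpr size_t CHUNK_SIZE = 4096;   // assumed granularity
    static constexpr int NUM_FREE_LISTS = 32;    // from the header above

    // Map a free chunk's size (a multiple of CHUNK_SIZE) to its list:
    // small sizes get a dedicated list, everything else falls into the
    // catch-all list 0, which is searched when the exact list is empty.
    inline int freeListIndex(size_t size) {
        size_t chunks = size / CHUNK_SIZE;
        return chunks < (size_t)NUM_FREE_LISTS ? (int)chunks : 0;
    }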
@@ -333,9 +320,9 @@ public:
     GCAllocation* realloc(GCAllocation* alloc, size_t bytes);
     void free(GCAllocation* alloc);
 
-    GCAllocation* allocationFrom(void* ptr);
     void freeUnmarked();
+    GCAllocation* allocationFrom(void* ptr);
 
     void getStatistics(HeapStatistics* stats);
 };
@@ -343,7 +330,20 @@ public:
 //
 // Objects are allocated with individual mmap() calls, and kept in a
 // linked list. They are not reused.
-class HugeArena : public Arena<HUGE_ARENA_START> {
+class HugeArena : public Arena<HUGE_ARENA_START, ARENA_SIZE> {
+public:
+    HugeArena(Heap* heap) : heap(heap) {}
+
+    GCAllocation* __attribute__((__malloc__)) alloc(size_t bytes);
+    GCAllocation* realloc(GCAllocation* alloc, size_t bytes);
+    void free(GCAllocation* alloc);
+
+    GCAllocation* allocationFrom(void* ptr);
+    void freeUnmarked();
+
+    void getStatistics(HeapStatistics* stats);
+
+private:
     struct HugeObj {
         HugeObj* next, **prev;
         size_t obj_size;
@@ -369,18 +369,6 @@ class HugeArena : public Arena<HUGE_ARENA_START> {
     HugeObj* head;
 
     Heap* heap;
-
-public:
-    HugeArena(Heap* heap) : heap(heap) {}
-
-    GCAllocation* __attribute__((__malloc__)) alloc(size_t bytes);
-    GCAllocation* realloc(GCAllocation* alloc, size_t bytes);
-    void free(GCAllocation* alloc);
-
-    void freeUnmarked();
-    GCAllocation* allocationFrom(void* ptr);
-
-    void getStatistics(HeapStatistics* stats);
 };
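Since HugeArena backs every object with its own mmap (via the inherited doMmap, which asserts size % PAGE_SIZE == 0), requests must be rounded up to whole pages first. A sketch of that rounding:

    #include <cstddef>

    #define PAGE_SIZE 4096 // matches the definition at the top of this diff

    // Round a request (header included) up to a page multiple, as doMmap
    // requires; works because PAGE_SIZE is a power of two.
    inline size_t roundUpToPages(size_t bytes) {
        return (bytes + PAGE_SIZE - 1) & ~(size_t)(PAGE_SIZE - 1);
    }
    // e.g. roundUpToPages(1) == 4096, roundUpToPages(8192) == 8192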
@@ -420,10 +408,10 @@ public:
         return small_arena.alloc(bytes);
     }
 
-    void destroyContents(GCAllocation* alloc);
+    void destructContents(GCAllocation* alloc);
 
     void free(GCAllocation* alloc) {
-        destroyContents(alloc);
+        destructContents(alloc);
 
         if (large_arena.contains(alloc)) {
             large_arena.free(alloc);
...
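The hunk above is truncated after the large_arena branch; presumably the remaining branches dispatch to the other two arenas the same way, since the disjoint address ranges make contains() a cheap range check. A sketch of the full dispatch under that assumption (member names follow the header):

    void free(GCAllocation* alloc) {
        destructContents(alloc);

        if (large_arena.contains(alloc)) {
            large_arena.free(alloc);
            return;
        }
        if (huge_arena.contains(alloc)) {
            huge_arena.free(alloc);
            return;
        }
        // Anything not owned by the other arenas must be a small object.
        assert(small_arena.contains(alloc));
        small_arena.free(alloc);
    }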