Commit 9415d7c1 authored by Kevin Modzelewski's avatar Kevin Modzelewski

Merge pull request #632 from toshok/mmap-larger-regions

don't mmap such small regions
parents 06a07a2c cdfbb8ee
...@@ -469,7 +469,7 @@ SmallArena::Block** SmallArena::_freeChain(Block** head, std::vector<Box*>& weak ...@@ -469,7 +469,7 @@ SmallArena::Block** SmallArena::_freeChain(Block** head, std::vector<Box*>& weak
SmallArena::Block* SmallArena::_allocBlock(uint64_t size, Block** prev) { SmallArena::Block* SmallArena::_allocBlock(uint64_t size, Block** prev) {
Block* rtn = (Block*)doMmap(sizeof(Block)); Block* rtn = (Block*)allocFromArena(sizeof(Block));
assert(rtn); assert(rtn);
rtn->size = size; rtn->size = size;
rtn->num_obj = BLOCK_SIZE / size; rtn->num_obj = BLOCK_SIZE / size;
...@@ -752,7 +752,7 @@ retry: ...@@ -752,7 +752,7 @@ retry:
if (free_chunks) if (free_chunks)
return (LargeObj*)free_chunks; return (LargeObj*)free_chunks;
section = (LargeBlock*)doMmap(BLOCK_SIZE); section = (LargeBlock*)allocFromArena(BLOCK_SIZE);
if (!section) if (!section)
return NULL; return NULL;
...@@ -819,7 +819,8 @@ GCAllocation* HugeArena::alloc(size_t size) { ...@@ -819,7 +819,8 @@ GCAllocation* HugeArena::alloc(size_t size) {
size_t total_size = size + sizeof(HugeObj); size_t total_size = size + sizeof(HugeObj);
total_size = (total_size + PAGE_SIZE - 1) & ~(PAGE_SIZE - 1); total_size = (total_size + PAGE_SIZE - 1) & ~(PAGE_SIZE - 1);
HugeObj* rtn = (HugeObj*)doMmap(total_size); extendMapping(total_size);
HugeObj* rtn = (HugeObj*)allocFromArena(total_size);
rtn->obj_size = size; rtn->obj_size = size;
nullNextPrev(rtn); nullNextPrev(rtn);
......
...@@ -132,27 +132,44 @@ inline void clearMark(GCAllocation* header) { ...@@ -132,27 +132,44 @@ inline void clearMark(GCAllocation* header) {
#define PAGE_SIZE 4096 #define PAGE_SIZE 4096
template <uintptr_t arena_start, uintptr_t arena_size> class Arena { template <uintptr_t arena_start, uintptr_t arena_size, uintptr_t initial_mapsize, uintptr_t increment> class Arena {
private: private:
void* cur; void* cur;
void* end; void* frontier;
void* arena_end;
protected: protected:
Arena() : cur((void*)arena_start), end((void*)(arena_start + arena_size)) {} Arena() : cur((void*)arena_start), frontier((void*)arena_start), arena_end((void*)(arena_start + arena_size)) {
if (initial_mapsize)
extendMapping(initial_mapsize);
}
public: // extends the mapping for this arena
void* doMmap(size_t size) { void extendMapping(size_t size) {
assert(size % PAGE_SIZE == 0); assert(size % PAGE_SIZE == 0);
assert(((uint8_t*)cur + size) < end && "arena full"); assert(((uint8_t*)frontier + size) < arena_end && "arena full");
void* mrtn = mmap(cur, size, PROT_READ | PROT_WRITE, MAP_FIXED | MAP_PRIVATE | MAP_ANONYMOUS, -1, 0); void* mrtn = mmap(frontier, size, PROT_READ | PROT_WRITE, MAP_FIXED | MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
RELEASE_ASSERT((uintptr_t)mrtn != -1, "failed to allocate memory from OS"); RELEASE_ASSERT((uintptr_t)mrtn != -1, "failed to allocate memory from OS");
ASSERT(mrtn == cur, "%p %p\n", mrtn, cur); ASSERT(mrtn == frontier, "%p %p\n", mrtn, cur);
frontier = (uint8_t*)frontier + size;
}
void* allocFromArena(size_t size) {
if (((char*)cur + size) >= (char*)frontier) {
// grow the arena by a multiple of increment such that we can service the allocation request
size_t grow_size = (size + increment - 1) & ~(increment - 1);
extendMapping(grow_size);
}
void* rtn = cur;
cur = (uint8_t*)cur + size; cur = (uint8_t*)cur + size;
return mrtn; return rtn;
} }
public:
bool contains(void* addr) { return (void*)arena_start <= addr && addr < cur; } bool contains(void* addr) { return (void*)arena_start <= addr && addr < cur; }
}; };
...@@ -175,7 +192,7 @@ static const size_t sizes[] = { ...@@ -175,7 +192,7 @@ static const size_t sizes[] = {
static constexpr size_t NUM_BUCKETS = sizeof(sizes) / sizeof(sizes[0]); static constexpr size_t NUM_BUCKETS = sizeof(sizes) / sizeof(sizes[0]);
class SmallArena : public Arena<SMALL_ARENA_START, ARENA_SIZE> { class SmallArena : public Arena<SMALL_ARENA_START, ARENA_SIZE, 64 * 1024 * 1024, 16 * 1024 * 1024> {
public: public:
SmallArena(Heap* heap) : Arena(), heap(heap), thread_caches(heap, this) { SmallArena(Heap* heap) : Arena(), heap(heap), thread_caches(heap, this) {
#ifndef NDEBUG #ifndef NDEBUG
...@@ -353,7 +370,7 @@ private: ...@@ -353,7 +370,7 @@ private:
// //
// Blocks of 1meg are mmap'ed individually, and carved up as needed. // Blocks of 1meg are mmap'ed individually, and carved up as needed.
// //
class LargeArena : public Arena<LARGE_ARENA_START, ARENA_SIZE> { class LargeArena : public Arena<LARGE_ARENA_START, ARENA_SIZE, 32 * 1024 * 1024, 16 * 1024 * 1024> {
private: private:
struct LargeBlock { struct LargeBlock {
LargeBlock* next; LargeBlock* next;
...@@ -420,7 +437,7 @@ public: ...@@ -420,7 +437,7 @@ public:
// //
// Objects are allocated with individual mmap() calls, and kept in a // Objects are allocated with individual mmap() calls, and kept in a
// linked list. They are not reused. // linked list. They are not reused.
class HugeArena : public Arena<HUGE_ARENA_START, ARENA_SIZE> { class HugeArena : public Arena<HUGE_ARENA_START, ARENA_SIZE, 0, PAGE_SIZE> {
public: public:
HugeArena(Heap* heap) : heap(heap) {} HugeArena(Heap* heap) : heap(heap) {}
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment