
Commit

Use native aligned alloc on Windows and POSIX. Deprecates "offset". Simplification of the Allocator interface.
lerno committed Feb 22, 2024
1 parent 299f48d commit d58c47b
Showing 15 changed files with 329 additions and 204 deletions.
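
For callers, the shape of the interface after this commit: acquire and resize keep the trailing offset parameter for now, but it is deprecated and internal call sites pass 0; the arena, dynamic-arena and simple-heap implementations now state size > 0 and old_pointer != null as contracts instead of handling those cases; and memory acquired with an explicit alignment should be released with aligned = true so the Win32 path can route it to _aligned_free. The sketch below is illustrative only and written against the method signatures visible in this diff; the example function, the local copy of LIBC_ALLOCATOR and the !! unwraps are assumptions, not part of the commit.

module allocator_example;
import std::core::mem::allocator;

fn void example()
{
    // Local copy so the &self methods have an lvalue to bind to.
    LibcAllocator a = allocator::LIBC_ALLOCATOR;
    // 64 zeroed bytes at 32-byte alignment; the deprecated offset argument is always 0.
    void* p = a.acquire(64, true, 32, 0)!!;
    // size > 0 and a non-null old pointer are now the caller's responsibility.
    p = a.resize(p, 128, 32, 0)!!;
    // aligned = true because an over-aligned block was requested.
    a.release(p, true);
}
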
35 changes: 12 additions & 23 deletions lib/std/core/allocators/arena_allocator.c3
@@ -30,9 +30,11 @@ struct ArenaAllocatorHeader @local
char[*] data;
}

/**
* @require ptr != null
**/
fn void ArenaAllocator.release(&self, void* ptr, bool) @dynamic
{
if (!ptr) return;
assert((uptr)ptr >= (uptr)self.data.ptr, "Pointer originates from a different allocator.");
ArenaAllocatorHeader* header = ptr - ArenaAllocatorHeader.sizeof;
// Reclaim memory if it's the last element.
@@ -41,29 +43,26 @@ fn void ArenaAllocator.release(&self, void* ptr, bool) @dynamic
self.used -= header.size + ArenaAllocatorHeader.sizeof;
}
}

fn usz ArenaAllocator.mark(&self) @dynamic => self.used;
fn void ArenaAllocator.reset(&self, usz mark) @dynamic => self.used = mark;

/**
* @require !alignment || math::is_power_of_2(alignment)
* @require alignment <= mem::MAX_MEMORY_ALIGNMENT `alignment too big`
* @require offset <= mem::MAX_MEMORY_ALIGNMENT `offset too big`
* @require offset <= size && offset >= 0
* @require mem::aligned_offset(offset, ArenaAllocatorHeader.alignof) == offset
* @require size > 0
**/
fn void*! ArenaAllocator.acquire(&self, usz size, bool clear, usz alignment, usz offset) @dynamic
{
if (!size) return null;
alignment = alignment_for_allocation(alignment);
usz total_len = self.data.len;
if (size > total_len) return AllocationFailure.CHUNK_TOO_LARGE?;
void* start_mem = self.data.ptr;
void* unaligned_pointer_to_offset = start_mem + self.used + ArenaAllocatorHeader.sizeof + offset;
void* aligned_pointer_to_offset = mem::aligned_pointer(unaligned_pointer_to_offset, alignment);
usz end = (usz)(aligned_pointer_to_offset - self.data.ptr) + size - offset;
void* unaligned_pointer_to_offset = start_mem + self.used + ArenaAllocatorHeader.sizeof;
void* mem = mem::aligned_pointer(unaligned_pointer_to_offset, alignment);
usz end = (usz)(mem - self.data.ptr) + size;
if (end > total_len) return AllocationFailure.OUT_OF_MEMORY?;
self.used = end;
void* mem = aligned_pointer_to_offset - offset;
ArenaAllocatorHeader* header = mem - ArenaAllocatorHeader.sizeof;
header.size = size;
if (clear) mem::clear(mem, size, mem::DEFAULT_MEM_ALIGNMENT);
@@ -73,29 +72,19 @@ fn void*! ArenaAllocator.resize(&self, void *old_pointer, usz size, usz alignment, usz offset) @dynamic
/**
* @require !alignment || math::is_power_of_2(alignment)
* @require alignment <= mem::MAX_MEMORY_ALIGNMENT `alignment too big`
* @require offset <= mem::MAX_MEMORY_ALIGNMENT `offset too big`
* @require offset <= size && offset >= 0
* @require mem::aligned_offset(offset, ArenaAllocatorHeader.alignof) == offset
* @require old_pointer != null
* @require size > 0
**/
fn void*! ArenaAllocator.resize(&self, void *old_pointer, usz size, usz alignment, usz offset) @dynamic
{
if (!size)
{
self.release(old_pointer, alignment > 0);
return null;
}
if (!old_pointer)
{
return self.acquire(size, true, alignment, offset);
}
alignment = alignment_for_allocation(alignment);
assert(old_pointer >= self.data.ptr, "Pointer originates from a different allocator.");
usz total_len = self.data.len;
if (size > total_len) return AllocationFailure.CHUNK_TOO_LARGE?;
ArenaAllocatorHeader* header = old_pointer - ArenaAllocatorHeader.sizeof;
usz old_size = header.size;
// Do last allocation and alignment match?
if (&self.data[self.used] == old_pointer + old_size && mem::ptr_is_aligned(old_pointer + offset, alignment))
if (&self.data[self.used] == old_pointer + old_size && mem::ptr_is_aligned(old_pointer, alignment))
{
if (old_size >= size)
{
@@ -111,7 +100,7 @@ fn void*! ArenaAllocator.resize(&self, void *old_pointer, usz size, usz alignment, usz offset) @dynamic
return old_pointer;
}
// Otherwise just allocate new memory.
void* mem = self.acquire(size, false, alignment, offset)!;
void* mem = self.acquire(size, false, alignment, 0)!;
mem::copy(mem, old_pointer, old_size, mem::DEFAULT_MEM_ALIGNMENT, mem::DEFAULT_MEM_ALIGNMENT);
return mem;
}
31 changes: 12 additions & 19 deletions lib/std/core/allocators/dynamic_arena.c3
@@ -59,11 +59,11 @@ struct DynamicArenaChunk @local
}

/**
* @require ptr
* @require self.page `tried to free pointer on invalid allocator`
*/
fn void DynamicArenaAllocator.release(&self, void* ptr, bool) @dynamic
{
if (!ptr) return;
DynamicArenaPage* current_page = self.page;
if (ptr == current_page.current_stack_ptr)
{
@@ -73,19 +73,12 @@ fn void DynamicArenaAllocator.release(&self, void* ptr, bool) @dynamic
}

/**
* @require size > 0 `Resize doesn't support zeroing`
* @require old_pointer != null `Resize doesn't handle null pointers`
* @require self.page `tried to realloc pointer on invalid allocator`
*/
fn void*! DynamicArenaAllocator.resize(&self, void* old_pointer, usz size, usz alignment, usz offset) @dynamic
{
if (!size)
{
self.release(old_pointer, alignment > 0);
return null;
}
if (!old_pointer)
{
return self.acquire(size, true, alignment, offset);
}
DynamicArenaPage* current_page = self.page;
alignment = alignment_for_allocation(alignment);
usz* old_size_ptr = old_pointer - DEFAULT_SIZE_PREFIX;
@@ -109,7 +102,7 @@ fn void*! DynamicArenaAllocator.resize(&self, void* old_pointer, usz size, usz alignment, usz offset) @dynamic
current_page.used += add_size;
return old_pointer;
}
void* new_mem = self.acquire(size, false, alignment, offset)!;
void* new_mem = self.acquire(size, false, alignment, 0)!;
mem::copy(new_mem, old_pointer, old_size, mem::DEFAULT_MEM_ALIGNMENT);
return new_mem;
}
@@ -135,10 +128,10 @@ fn void DynamicArenaAllocator.reset(&self, usz mark = 0) @dynamic
* @require math::is_power_of_2(alignment)
* @require size > 0
*/
fn void*! DynamicArenaAllocator._alloc_new(&self, usz size, usz alignment, usz offset) @local
fn void*! DynamicArenaAllocator._alloc_new(&self, usz size, usz alignment) @local
{
// First, make sure that we can align it, extending the page size if needed.
usz page_size = max(self.page_size, mem::aligned_offset(size + DynamicArenaChunk.sizeof + offset, alignment) - offset);
usz page_size = max(self.page_size, mem::aligned_offset(size + DynamicArenaChunk.sizeof, alignment));

// Grab the page without alignment (we do it ourselves)
void* mem = allocator::malloc_try(self.backing_allocator, page_size)!;
@@ -149,7 +142,7 @@ fn void*! DynamicArenaAllocator._alloc_new(&self, usz size, usz alignment, usz offset) @local
return err?;
}
page.memory = mem;
void* mem_start = mem::aligned_pointer(mem + offset + DynamicArenaChunk.sizeof, alignment) - offset;
void* mem_start = mem::aligned_pointer(mem + DynamicArenaChunk.sizeof, alignment);
assert(mem_start + size < mem + page_size);
DynamicArenaChunk* chunk = (DynamicArenaChunk*)mem_start - 1;
chunk.size = size;
@@ -162,11 +155,11 @@ fn void*! DynamicArenaAllocator._alloc_new(&self, usz size, usz alignment, usz offset) @local
}

/**
* @require size > 0 `acquire expects size > 0`
* @require !alignment || math::is_power_of_2(alignment)
*/
fn void*! DynamicArenaAllocator.acquire(&self, usz size, bool clear, usz alignment, usz offset) @dynamic
{
if (!size) return null;
alignment = alignment_for_allocation(alignment);
DynamicArenaPage* page = self.page;
void* ptr = {|
@@ -176,14 +169,14 @@ fn void*! DynamicArenaAllocator.acquire(&self, usz size, bool clear, usz alignment, usz offset) @dynamic
self.unused_page = page.prev_arena;
page.prev_arena = null;
}
if (!page) return self._alloc_new(size, alignment, offset);
void* start = mem::aligned_pointer(page.memory + page.used + DynamicArenaChunk.sizeof + offset, alignment) - offset;
if (!page) return self._alloc_new(size, alignment);
void* start = mem::aligned_pointer(page.memory + page.used + DynamicArenaChunk.sizeof, alignment);
usz new_used = start - page.memory + size;
if ALLOCATE_NEW: (new_used > page.total)
{
if ((page = self.unused_page))
{
start = mem::aligned_pointer(page.memory + page.used + DynamicArenaChunk.sizeof + offset, alignment) - offset;
start = mem::aligned_pointer(page.memory + page.used + DynamicArenaChunk.sizeof, alignment);
new_used = start + size - page.memory;
if (page.total >= new_used)
{
@@ -193,7 +186,7 @@ fn void*! DynamicArenaAllocator.acquire(&self, usz size, bool clear, usz alignment, usz offset) @dynamic
break ALLOCATE_NEW;
}
}
return self._alloc_new(size, alignment, offset);
return self._alloc_new(size, alignment);
}
page.used = new_used;
assert(start + size == page.memory + page.used);
16 changes: 3 additions & 13 deletions lib/std/core/allocators/heap_allocator.c3
@@ -23,27 +23,17 @@ fn void SimpleHeapAllocator.init(&self, MemoryAllocFn allocator)

fn void*! SimpleHeapAllocator.acquire(&self, usz size, bool clear, usz alignment, usz offset) @dynamic
{
if (!size) return null;
if (clear)
{
return alignment > 0 ? @aligned_alloc(self._calloc, size, alignment, offset) : self._calloc(size);
return alignment > 0 ? @aligned_alloc(self._calloc, size, alignment) : self._calloc(size);
}
return alignment > 0 ? @aligned_alloc(self._alloc, size, alignment, offset) : self._alloc(size);
return alignment > 0 ? @aligned_alloc(self._alloc, size, alignment) : self._alloc(size);
}

fn void*! SimpleHeapAllocator.resize(&self, void* old_pointer, usz size, usz alignment, usz offset) @dynamic
{
if (!size)
{
self.release(old_pointer, alignment > 0);
return null;
}
if (!old_pointer)
{
return self.acquire(size, true, alignment, offset);
}
return alignment > 0
? @aligned_realloc(self._calloc, self._free, old_pointer, size, alignment, offset)
? @aligned_realloc(self._calloc, self._free, old_pointer, size, alignment)
: self._realloc(old_pointer, size);
}

123 changes: 113 additions & 10 deletions lib/std/core/allocators/libc_allocator.c3
@@ -6,22 +6,36 @@ module std::core::mem::allocator;
import libc;

const LibcAllocator LIBC_ALLOCATOR = {};


distinct LibcAllocator (Allocator) = uptr;

module std::core::mem::allocator @if(env::POSIX);
import std::os;
import libc;

fn void*! LibcAllocator.acquire(&self, usz bytes, bool clear, usz alignment, usz offset) @dynamic
{
assert(alignment != 0 || offset == 0);
if (clear)
{
void* data = alignment ? @aligned_alloc(fn void*(usz bytes) => libc::calloc(bytes, 1), bytes, alignment, offset)!! : libc::calloc(bytes, 1);
return data ?: AllocationFailure.OUT_OF_MEMORY?;
void* data @noinit;
if (alignment > mem::DEFAULT_MEM_ALIGNMENT)
{
if (posix::posix_memalign(&data, alignment, bytes)) return AllocationFailure.OUT_OF_MEMORY?;
mem::clear(data, bytes, mem::DEFAULT_MEM_ALIGNMENT);
return data;
}
return libc::calloc(1, bytes) ?: AllocationFailure.OUT_OF_MEMORY?;
}
else
{
void* data = alignment ? @aligned_alloc(libc::malloc, bytes, alignment, offset)!! : libc::malloc(bytes);
if (!data) return AllocationFailure.OUT_OF_MEMORY?;
void* data @noinit;
if (alignment > mem::DEFAULT_MEM_ALIGNMENT)
{
if (posix::posix_memalign(&data, alignment, bytes)) return AllocationFailure.OUT_OF_MEMORY?;
}
else
{
if (!(data = libc::malloc(bytes))) return AllocationFailure.OUT_OF_MEMORY?;
}
$if env::TESTING:
for (usz i = 0; i < bytes; i++) ((char*)data)[i] = 0xAA;
$endif
@@ -31,19 +45,108 @@ fn void*! LibcAllocator.acquire(&self, usz bytes, bool clear, usz alignment, usz offset) @dynamic

fn void*! LibcAllocator.resize(&self, void* old_ptr, usz new_bytes, usz alignment, usz offset) @dynamic
{
assert(alignment != 0 || offset == 0);
if (!new_bytes)
{
self.release(old_ptr, alignment > 0);
return null;
}
if (!old_ptr)
{
return self.acquire(new_bytes, true, alignment, offset);
return self.acquire(new_bytes, false, alignment, 0);
}
if (alignment <= mem::DEFAULT_MEM_ALIGNMENT) return libc::realloc(old_ptr, new_bytes) ?: AllocationFailure.OUT_OF_MEMORY?;

void* new_ptr;
if (posix::posix_memalign(&new_ptr, alignment, new_bytes)) return AllocationFailure.OUT_OF_MEMORY?;

$switch
$case env::DARWIN:
usz old_usable_size = darwin::malloc_size(old_ptr);
$case env::LINUX:
usz old_usable_size = linux::malloc_usable_size(old_ptr);
$default:
usz old_usable_size = new_bytes;
$endswitch

usz copy_size = new_bytes < old_usable_size ? new_bytes : old_usable_size;
mem::copy(new_ptr, old_ptr, copy_size, mem::DEFAULT_MEM_ALIGNMENT, mem::DEFAULT_MEM_ALIGNMENT);
libc::free(old_ptr);
return new_ptr;
}

fn void LibcAllocator.release(&self, void* old_ptr, bool aligned) @dynamic
{
libc::free(old_ptr);
}

module std::core::mem::allocator @if(env::WIN32);
import std::os::win32;
import libc;

fn void*! LibcAllocator.acquire(&self, usz bytes, bool clear, usz alignment, usz offset) @dynamic
{
if (clear)
{
if (alignment > 0)
{
return win32::_aligned_recalloc(null, bytes, alignment) ?: AllocationFailure.OUT_OF_MEMORY?;
}
return libc::calloc(1, bytes) ?: AllocationFailure.OUT_OF_MEMORY?;
}
void* data = alignment > 0 ? win32::_aligned_malloc(bytes, alignment) : libc::malloc(bytes);
if (!data) return AllocationFailure.OUT_OF_MEMORY?;
$if env::TESTING:
for (usz i = 0; i < bytes; i++) ((char*)data)[i] = 0xAA;
$endif
return data;
}

fn void*! LibcAllocator.resize(&self, void* old_ptr, usz new_bytes, usz alignment, usz offset) @dynamic
{
if (alignment)
{
return win32::_aligned_realloc(old_ptr, new_bytes, alignment) ?: AllocationFailure.OUT_OF_MEMORY?;
}
return libc::realloc(old_ptr, new_bytes) ?: AllocationFailure.OUT_OF_MEMORY?;
}

fn void LibcAllocator.release(&self, void* old_ptr, bool aligned) @dynamic
{
if (aligned)
{
win32::_aligned_free(old_ptr);
return;
}
libc::free(old_ptr);
}

module std::core::mem::allocator @if(!env::WIN32 && !env::POSIX);
import libc;

fn void*! LibcAllocator.acquire(&self, usz bytes, bool clear, usz alignment, usz offset) @dynamic
{
if (clear)
{
void* data = alignment ? @aligned_alloc(fn void*(usz bytes) => libc::calloc(bytes, 1), bytes, alignment)!! : libc::calloc(bytes, 1);
return data ?: AllocationFailure.OUT_OF_MEMORY?;
}
else
{
void* data = alignment ? @aligned_alloc(libc::malloc, bytes, alignment)!! : libc::malloc(bytes);
if (!data) return AllocationFailure.OUT_OF_MEMORY?;
$if env::TESTING:
for (usz i = 0; i < bytes; i++) ((char*)data)[i] = 0xAA;
$endif
return data;
}
}


fn void*! LibcAllocator.resize(&self, void* old_ptr, usz new_bytes, usz alignment, usz offset) @dynamic
{
if (alignment)
{
void* data = @aligned_realloc(fn void*(usz bytes) => libc::calloc(bytes, 1), libc::free, old_ptr, new_bytes, alignment, offset)!!;
void* data = @aligned_realloc(fn void*(usz bytes) => libc::malloc(bytes), libc::free, old_ptr, new_bytes, alignment)!!;
return data ?: AllocationFailure.OUT_OF_MEMORY?;
}
return libc::realloc(old_ptr, new_bytes) ?: AllocationFailure.OUT_OF_MEMORY?;
