0.5.5 features (#1151)
0.5.5:
- Disallow multiple `_` in a row in digits, e.g. `1__000`. #1138
- Fixed toposort example.
- Struct/union members without a storage size are now correctly rejected. #1147
- `math::pow` will now correctly promote integer arguments.
- Added `new_aligned` and `alloc_aligned` functions to prevent accidental under-alignment when allocating SIMD values.
- Fixed pointer difference failing where alignment != size (structs etc.). #1150
- Added a test that overalignment actually works for lists.
- Fixed array calculation for npot2 vectors.
- Use native aligned alloc on Windows and POSIX.
- Deprecates "offset".
- Simplification of the Allocator interface.
lerno authored Feb 22, 2024
1 parent b7f4fd9 commit 7ea3d23
Showing 39 changed files with 746 additions and 307 deletions.
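
For context on the first item in the message, a minimal sketch of the stricter digit-separator rule; the program below is illustrative only and not taken from this commit (it assumes only the standard `std::io` module):

```c3
import std::io;

fn void main()
{
    int ok = 1_000_000;    // single '_' separators remain valid
    // int bad = 1__000;   // 0.5.5 now rejects consecutive '_' in a literal (#1138)
    io::printfn("%d", ok);
}
```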
4 changes: 2 additions & 2 deletions .github/workflows/main.yml
@@ -265,7 +265,7 @@ jobs:
- name: Compile and run some examples
run: |
cd resources
cd resources
../build/c3c compile examples/base64.c3
../build/c3c compile examples/binarydigits.c3
../build/c3c compile examples/brainfk.c3
@@ -285,7 +285,7 @@ jobs:
../build/c3c compile examples/contextfree/dynscope.c3
../build/c3c compile examples/contextfree/guess_number.c3
../build/c3c compile examples/contextfree/multi.c3
../build/c3c compile examples/contextfree/cleanup.c3
../build/c3c compile examples/contextfree/cleanup.c3
../build/c3c compile-run examples/hello_world_many.c3
../build/c3c compile-run examples/time.c3
../build/c3c compile-run examples/fannkuch-redux.c3
35 changes: 12 additions & 23 deletions lib/std/core/allocators/arena_allocator.c3
@@ -30,9 +30,11 @@ struct ArenaAllocatorHeader @local
char[*] data;
}

/*
* @require ptr != null
**/
fn void ArenaAllocator.release(&self, void* ptr, bool) @dynamic
{
if (!ptr) return;
assert((uptr)ptr >= (uptr)self.data.ptr, "Pointer originates from a different allocator.");
ArenaAllocatorHeader* header = ptr - ArenaAllocatorHeader.sizeof;
// Reclaim memory if it's the last element.
@@ -41,29 +43,26 @@ fn void ArenaAllocator.release(&self, void* ptr, bool) @dynamic
self.used -= header.size + ArenaAllocatorHeader.sizeof;
}
}

fn usz ArenaAllocator.mark(&self) @dynamic => self.used;
fn void ArenaAllocator.reset(&self, usz mark) @dynamic => self.used = mark;

/**
* @require !alignment || math::is_power_of_2(alignment)
* @require alignment <= mem::MAX_MEMORY_ALIGNMENT `alignment too big`
* @require offset <= mem::MAX_MEMORY_ALIGNMENT `offset too big`
* @require offset <= size && offset >= 0
* @require mem::aligned_offset(offset, ArenaAllocatorHeader.alignof) == offset
* @require size > 0
**/
fn void*! ArenaAllocator.acquire(&self, usz size, bool clear, usz alignment, usz offset) @dynamic
{
if (!size) return null;
alignment = alignment_for_allocation(alignment);
usz total_len = self.data.len;
if (size > total_len) return AllocationFailure.CHUNK_TOO_LARGE?;
void* start_mem = self.data.ptr;
void* unaligned_pointer_to_offset = start_mem + self.used + ArenaAllocatorHeader.sizeof + offset;
void* aligned_pointer_to_offset = mem::aligned_pointer(unaligned_pointer_to_offset, alignment);
usz end = (usz)(aligned_pointer_to_offset - self.data.ptr) + size - offset;
void* unaligned_pointer_to_offset = start_mem + self.used + ArenaAllocatorHeader.sizeof;
void* mem = mem::aligned_pointer(unaligned_pointer_to_offset, alignment);
usz end = (usz)(mem - self.data.ptr) + size;
if (end > total_len) return AllocationFailure.OUT_OF_MEMORY?;
self.used = end;
void* mem = aligned_pointer_to_offset - offset;
ArenaAllocatorHeader* header = mem - ArenaAllocatorHeader.sizeof;
header.size = size;
if (clear) mem::clear(mem, size, mem::DEFAULT_MEM_ALIGNMENT);
@@ -73,29 +72,19 @@ fn void*! ArenaAllocator.acquire(&self, usz size, bool clear, usz alignment, usz
/**
* @require !alignment || math::is_power_of_2(alignment)
* @require alignment <= mem::MAX_MEMORY_ALIGNMENT `alignment too big`
* @require offset <= mem::MAX_MEMORY_ALIGNMENT `offset too big`
* @require offset <= size && offset >= 0
* @require mem::aligned_offset(offset, ArenaAllocatorHeader.alignof) == offset
* @require old_pointer != null
* @require size > 0
**/
fn void*! ArenaAllocator.resize(&self, void *old_pointer, usz size, usz alignment, usz offset) @dynamic
{
if (!size)
{
self.release(old_pointer, alignment > 0);
return null;
}
if (!old_pointer)
{
return self.acquire(size, true, alignment, offset);
}
alignment = alignment_for_allocation(alignment);
assert(old_pointer >= self.data.ptr, "Pointer originates from a different allocator.");
usz total_len = self.data.len;
if (size > total_len) return AllocationFailure.CHUNK_TOO_LARGE?;
ArenaAllocatorHeader* header = old_pointer - ArenaAllocatorHeader.sizeof;
usz old_size = header.size;
// Do last allocation and alignment match?
if (&self.data[self.used] == old_pointer + old_size && mem::ptr_is_aligned(old_pointer + offset, alignment))
if (&self.data[self.used] == old_pointer + old_size && mem::ptr_is_aligned(old_pointer, alignment))
{
if (old_size >= size)
{
@@ -111,7 +100,7 @@ fn void*! ArenaAllocator.resize(&self, void *old_pointer, usz size, usz alignmen
return old_pointer;
}
// Otherwise just allocate new memory.
void* mem = self.acquire(size, false, alignment, offset)!;
void* mem = self.acquire(size, false, alignment, 0)!;
mem::copy(mem, old_pointer, old_size, mem::DEFAULT_MEM_ALIGNMENT, mem::DEFAULT_MEM_ALIGNMENT);
return mem;
}
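
The hunks above replace the arena allocator's runtime null/zero-size checks with `@require` contracts and stop threading `offset` through the internal calls. A caller-side sketch of what that implies; the module import and the `init` call are assumptions, not taken from this diff:

```c3
import std::core::mem;  // module path assumed; ArenaAllocator lives under lib/std/core/allocators

fn void arena_example()
{
    char[1024] buf;
    ArenaAllocator arena;
    arena.init(buf[..]);                              // init signature assumed
    void* block = arena.acquire(64, false, 16, 0)!!;  // size > 0, power-of-2 alignment, per the new contracts
    arena.release(block, false);                      // ptr must now be non-null; the old `if (!ptr) return;` guard is gone
}
```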
31 changes: 12 additions & 19 deletions lib/std/core/allocators/dynamic_arena.c3
@@ -59,11 +59,11 @@ struct DynamicArenaChunk @local
}

/**
* @require ptr
* @require self.page `tried to free pointer on invalid allocator`
*/
fn void DynamicArenaAllocator.release(&self, void* ptr, bool) @dynamic
{
if (!ptr) return;
DynamicArenaPage* current_page = self.page;
if (ptr == current_page.current_stack_ptr)
{
@@ -73,19 +73,12 @@ fn void DynamicArenaAllocator.release(&self, void* ptr, bool) @dynamic
}

/**
* @require size > 0 `Resize doesn't support zeroing`
* @require old_pointer != null `Resize doesn't handle null pointers`
* @require self.page `tried to realloc pointer on invalid allocator`
*/
fn void*! DynamicArenaAllocator.resize(&self, void* old_pointer, usz size, usz alignment, usz offset) @dynamic
{
if (!size)
{
self.release(old_pointer, alignment > 0);
return null;
}
if (!old_pointer)
{
return self.acquire(size, true, alignment, offset);
}
DynamicArenaPage* current_page = self.page;
alignment = alignment_for_allocation(alignment);
usz* old_size_ptr = old_pointer - DEFAULT_SIZE_PREFIX;
@@ -109,7 +102,7 @@ fn void*! DynamicArenaAllocator.resize(&self, void* old_pointer, usz size, usz a
current_page.used += add_size;
return old_pointer;
}
void* new_mem = self.acquire(size, false, alignment, offset)!;
void* new_mem = self.acquire(size, false, alignment, 0)!;
mem::copy(new_mem, old_pointer, old_size, mem::DEFAULT_MEM_ALIGNMENT);
return new_mem;
}
@@ -135,10 +128,10 @@ fn void DynamicArenaAllocator.reset(&self, usz mark = 0) @dynamic
* @require math::is_power_of_2(alignment)
* @require size > 0
*/
fn void*! DynamicArenaAllocator._alloc_new(&self, usz size, usz alignment, usz offset) @local
fn void*! DynamicArenaAllocator._alloc_new(&self, usz size, usz alignment) @local
{
// First, make sure that we can align it, extending the page size if needed.
usz page_size = max(self.page_size, mem::aligned_offset(size + DynamicArenaChunk.sizeof + offset, alignment) - offset);
usz page_size = max(self.page_size, mem::aligned_offset(size + DynamicArenaChunk.sizeof, alignment));

// Grab the page without alignment (we do it ourselves)
void* mem = allocator::malloc_try(self.backing_allocator, page_size)!;
@@ -149,7 +142,7 @@ fn void*! DynamicArenaAllocator._alloc_new(&self, usz size, usz alignment, usz o
return err?;
}
page.memory = mem;
void* mem_start = mem::aligned_pointer(mem + offset + DynamicArenaChunk.sizeof, alignment) - offset;
void* mem_start = mem::aligned_pointer(mem + DynamicArenaChunk.sizeof, alignment);
assert(mem_start + size < mem + page_size);
DynamicArenaChunk* chunk = (DynamicArenaChunk*)mem_start - 1;
chunk.size = size;
@@ -162,11 +155,11 @@ fn void*! DynamicArenaAllocator._alloc_new(&self, usz size, usz alignment, usz o
}

/**
* @require size > 0 `acquire expects size > 0`
* @require !alignment || math::is_power_of_2(alignment)
*/
fn void*! DynamicArenaAllocator.acquire(&self, usz size, bool clear, usz alignment, usz offset) @dynamic
{
if (!size) return null;
alignment = alignment_for_allocation(alignment);
DynamicArenaPage* page = self.page;
void* ptr = {|
@@ -176,14 +169,14 @@ fn void*! DynamicArenaAllocator.acquire(&self, usz size, bool clear, usz alignme
self.unused_page = page.prev_arena;
page.prev_arena = null;
}
if (!page) return self._alloc_new(size, alignment, offset);
void* start = mem::aligned_pointer(page.memory + page.used + DynamicArenaChunk.sizeof + offset, alignment) - offset;
if (!page) return self._alloc_new(size, alignment);
void* start = mem::aligned_pointer(page.memory + page.used + DynamicArenaChunk.sizeof, alignment);
usz new_used = start - page.memory + size;
if ALLOCATE_NEW: (new_used > page.total)
{
if ((page = self.unused_page))
{
start = mem::aligned_pointer(page.memory + page.used + DynamicArenaChunk.sizeof + offset, alignment) - offset;
start = mem::aligned_pointer(page.memory + page.used + DynamicArenaChunk.sizeof, alignment);
new_used = start + size - page.memory;
if (page.total >= new_used)
{
@@ -193,7 +186,7 @@ fn void*! DynamicArenaAllocator.acquire(&self, usz size, bool clear, usz alignme
break ALLOCATE_NEW;
}
}
return self._alloc_new(size, alignment, offset);
return self._alloc_new(size, alignment);
}
page.used = new_used;
assert(start + size == page.memory + page.used);
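
Both allocators above now compute the block start as `mem::aligned_pointer(base + ChunkHeader.sizeof, alignment)` with no offset adjustment. A standalone sketch of that rounding step, assuming `mem::aligned_pointer` rounds an address up to the next multiple of a power-of-2 alignment:

```c3
fn void* aligned_start(void* base, usz header_size, usz alignment)
{
    // Round the address just past the header up to `alignment`; the chunk
    // header then sits immediately before the returned block.
    uptr addr = (uptr)base + header_size;
    return (void*)((addr + alignment - 1) & ~(uptr)(alignment - 1));
}
```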
16 changes: 3 additions & 13 deletions lib/std/core/allocators/heap_allocator.c3
@@ -23,27 +23,17 @@ fn void SimpleHeapAllocator.init(&self, MemoryAllocFn allocator)

fn void*! SimpleHeapAllocator.acquire(&self, usz size, bool clear, usz alignment, usz offset) @dynamic
{
if (!size) return null;
if (clear)
{
return alignment > 0 ? @aligned_calloc(self._calloc, size, alignment, offset) : self._calloc(size);
return alignment > 0 ? @aligned_alloc(self._calloc, size, alignment) : self._calloc(size);
}
return alignment > 0 ? @aligned_alloc(self._alloc, size, alignment, offset) : self._alloc(size);
return alignment > 0 ? @aligned_alloc(self._alloc, size, alignment) : self._alloc(size);
}

fn void*! SimpleHeapAllocator.resize(&self, void* old_pointer, usz size, usz alignment, usz offset) @dynamic
{
if (!size)
{
self.release(old_pointer, alignment > 0);
return null;
}
if (!old_pointer)
{
return self.acquire(size, true, alignment, offset);
}
return alignment > 0
? @aligned_realloc(self._calloc, self._free, old_pointer, size, alignment, offset)
? @aligned_realloc(self._calloc, self._free, old_pointer, size, alignment)
: self._realloc(old_pointer, size);
}

