Skip to content

Commit

Permalink
Rename arenaAlloc to fix collision with GHC RTS (#992)
Browse files Browse the repository at this point in the history
This fixes a rather unpleasant bug when using the C bindings from the
Haskell backend, whereby [recent
changes](https://bugs.freebsd.org/bugzilla/show_bug.cgi?id=246462) to
the dynamic loader on macOS break symbol lookup from dynamic libraries.

The result of that loader change is that calls to the backend's `arenaAlloc`
function were ending up in the Haskell RTS's function of the same name,
meaning that we got garbage memory allocated all over the place. There's
no sensible fix here other than renaming our function to avoid the
collision.
  • Loading branch information
Baltoli committed Feb 27, 2024
1 parent 97bf258 commit d02bebd
Show file tree
Hide file tree
Showing 4 changed files with 8 additions and 8 deletions.
2 changes: 1 addition & 1 deletion include/runtime/arena.h
Original file line number Diff line number Diff line change
Expand Up @@ -55,7 +55,7 @@ char getArenaSemispaceIDOfObject(void *);
// pointer to the first allocated byte.
// If called with requested size greater than the maximum single allocation
// size, the space is allocated in a general (not garbage-collected) pool.
void *arenaAlloc(struct arena *, size_t);
void *koreArenaAlloc(struct arena *, size_t);

// Resizes the last allocation as long as the resize does not require a new
// block allocation.
Expand Down
10 changes: 5 additions & 5 deletions runtime/alloc/alloc.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -69,25 +69,25 @@ void setKoreMemoryFunctionsForGMP() {
}

__attribute__((always_inline)) void *koreAlloc(size_t requested) {
return arenaAlloc(&youngspace, requested);
return koreArenaAlloc(&youngspace, requested);
}

__attribute__((always_inline)) void *koreAllocToken(size_t requested) {
size_t size = (requested + 7) & ~7;
return arenaAlloc(&youngspace, size < 16 ? 16 : size);
return koreArenaAlloc(&youngspace, size < 16 ? 16 : size);
}

__attribute__((always_inline)) void *koreAllocOld(size_t requested) {
return arenaAlloc(&oldspace, requested);
return koreArenaAlloc(&oldspace, requested);
}

__attribute__((always_inline)) void *koreAllocTokenOld(size_t requested) {
size_t size = (requested + 7) & ~7;
return arenaAlloc(&oldspace, size < 16 ? 16 : size);
return koreArenaAlloc(&oldspace, size < 16 ? 16 : size);
}

__attribute__((always_inline)) void *koreAllocAlwaysGC(size_t requested) {
return arenaAlloc(&alwaysgcspace, requested);
return koreArenaAlloc(&alwaysgcspace, requested);
}

void *koreResizeLastAlloc(void *oldptr, size_t newrequest, size_t last_size) {
Expand Down
2 changes: 1 addition & 1 deletion runtime/alloc/arena.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -133,7 +133,7 @@ doAllocSlow(size_t requested, struct arena *Arena) {
}

__attribute__((always_inline)) void *
arenaAlloc(struct arena *Arena, size_t requested) {
koreArenaAlloc(struct arena *Arena, size_t requested) {
if (Arena->block + requested > Arena->block_end) {
return doAllocSlow(requested, Arena);
}
Expand Down
2 changes: 1 addition & 1 deletion runtime/collect/collect.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -295,7 +295,7 @@ void koreCollect(void **roots, uint8_t nroots, layoutitem *typeInfo) {
// sequence at the start of the collection cycle. This means that the
// allocation pointer is invalid and does not actually point to the next
// address that would have been allocated at, according to the logic of
// arenaAlloc, which will have allocated a fresh memory block and put
// koreArenaAlloc, which will have allocated a fresh memory block and put
// the allocation at the start of it. Thus, we use movePtr with a size
// of zero to adjust and get the true address of the allocation.
scan_ptr = movePtr(previous_oldspace_alloc_ptr, 0, *old_alloc_ptr());
Expand Down

0 comments on commit d02bebd

Please sign in to comment.