Skip to content
Merged
Show file tree
Hide file tree
Changes from 3 commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion Cargo.toml
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
[package]
name = "linked_list_allocator"
version = "0.8.3"
version = "0.8.4"
authors = ["Philipp Oppermann <dev@phil-opp.com>"]
license = "Apache-2.0/MIT"

Expand Down
4 changes: 4 additions & 0 deletions Changelog.md
Original file line number Diff line number Diff line change
@@ -1,3 +1,7 @@
# 0.8.4

- Add function to get used and free heap size ([#32](https://github.com/phil-opp/linked-list-allocator/pull/32))

# 0.8.3

- Prevent writing to heap memory range when size too small ([#31](https://github.com/phil-opp/linked-list-allocator/pull/31))
Expand Down
50 changes: 35 additions & 15 deletions src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,7 @@ extern crate alloc;
use alloc::alloc::Layout;
#[cfg(feature = "alloc_ref")]
use alloc::alloc::{AllocErr, AllocInit, AllocRef, MemoryBlock};
#[cfg(feature = "use_spin")]
use core::alloc::GlobalAlloc;
use core::mem;
#[cfg(feature = "use_spin")]
Expand All @@ -31,6 +32,7 @@ mod test;
pub struct Heap {
bottom: usize,
size: usize,
used: usize,
holes: HoleList,
}

Expand All @@ -40,6 +42,7 @@ impl Heap {
Heap {
bottom: 0,
size: 0,
used: 0,
holes: HoleList::empty(),
}
}
Expand All @@ -53,6 +56,7 @@ impl Heap {
pub unsafe fn init(&mut self, heap_bottom: usize, heap_size: usize) {
self.bottom = heap_bottom;
self.size = heap_size;
self.used = 0;
self.holes = HoleList::new(heap_bottom, heap_size);
}

Expand All @@ -67,25 +71,35 @@ impl Heap {
Heap {
bottom: heap_bottom,
size: heap_size,
used: 0,
holes: HoleList::new(heap_bottom, heap_size),
}
}
}

/// Allocates a chunk of the given size with the given alignment. Returns a pointer to the
/// beginning of that chunk if it was successful. Else it returns `None`.
/// This function scans the list of free memory blocks and uses the first block that is big
/// enough. The runtime is in O(n) where n is the number of free blocks, but it should be
/// reasonably fast for small allocations.
pub fn allocate_first_fit(&mut self, layout: Layout) -> Result<NonNull<u8>, ()> {
pub fn align_layout(&self, layout: Layout) -> Layout {
let mut size = layout.size();
if size < HoleList::min_size() {
size = HoleList::min_size();
}
let size = align_up(size, mem::align_of::<Hole>());
let layout = Layout::from_size_align(size, layout.align()).unwrap();

self.holes.allocate_first_fit(layout)
layout
}

/// Allocates a chunk of the given size with the given alignment. Returns a pointer to the
/// beginning of that chunk if it was successful. Else it returns `None`.
/// This function scans the list of free memory blocks and uses the first block that is big
/// enough. The runtime is in O(n) where n is the number of free blocks, but it should be
/// reasonably fast for small allocations.
pub fn allocate_first_fit(&mut self, layout: Layout) -> Result<NonNull<u8>, ()> {
let aligned_layout = self.align_layout(layout);
let res = self.holes.allocate_first_fit(aligned_layout);
if res.is_ok() {
self.used += aligned_layout.size();
}
res
}

/// Frees the given allocation. `ptr` must be a pointer returned
Expand All @@ -96,14 +110,9 @@ impl Heap {
/// correct place. If the freed block is adjacent to another free block, the blocks are merged
/// again. This operation is in `O(n)` since the list needs to be sorted by address.
pub unsafe fn deallocate(&mut self, ptr: NonNull<u8>, layout: Layout) {
let mut size = layout.size();
if size < HoleList::min_size() {
size = HoleList::min_size();
}
let size = align_up(size, mem::align_of::<Hole>());
let layout = Layout::from_size_align(size, layout.align()).unwrap();

self.holes.deallocate(ptr, layout);
let aligned_layout = self.align_layout(layout);
self.holes.deallocate(ptr, aligned_layout);
self.used -= aligned_layout.size();
}

/// Returns the bottom address of the heap.
Expand All @@ -121,6 +130,16 @@ impl Heap {
self.bottom + self.size
}

/// Returns the size of the used part of the heap
pub fn used(&self) -> usize {
self.used
}

/// Returns the size of the free part of the heap
pub fn free(&self) -> usize {
self.size - self.used
}

/// Extends the size of the heap by creating a new hole at the end
///
/// # Unsafety
Expand Down Expand Up @@ -182,6 +201,7 @@ impl LockedHeap {
LockedHeap(Spinlock::new(Heap {
bottom: heap_bottom,
size: heap_size,
used: 0,
holes: HoleList::new(heap_bottom, heap_size),
}))
}
Expand Down