Use new nightly GlobalAlloc API #11

Merged: 3 commits, Apr 18, 2018

13 changes: 7 additions & 6 deletions src/hole.rs
@@ -1,5 +1,6 @@
+use core::ptr::NonNull;
use core::mem::size_of;
-use alloc::allocator::{Layout, AllocErr};
+use core::alloc::{Layout, Opaque, AllocErr};

use super::align_up;

@@ -42,7 +43,7 @@ impl HoleList {
/// block is returned.
/// This function uses the “first fit” strategy, so it uses the first hole that is big
/// enough. Thus the runtime is in O(n) but it should be reasonably fast for small allocations.
-pub fn allocate_first_fit(&mut self, layout: Layout) -> Result<*mut u8, AllocErr> {
+pub fn allocate_first_fit(&mut self, layout: Layout) -> Result<NonNull<Opaque>, AllocErr> {
assert!(layout.size() >= Self::min_size());

allocate_first_fit(&mut self.first, layout).map(|allocation| {
@@ -52,7 +53,7 @@ impl HoleList {
if let Some(padding) = allocation.back_padding {
deallocate(&mut self.first, padding.addr, padding.size);
}
-allocation.info.addr as *mut u8
+NonNull::new(allocation.info.addr as *mut Opaque).unwrap()
})
}

@@ -62,8 +63,8 @@
/// This function walks the list and inserts the given block at the correct place. If the freed
/// block is adjacent to another free block, the blocks are merged again.
/// This operation is in `O(n)` since the list needs to be sorted by address.
-pub unsafe fn deallocate(&mut self, ptr: *mut u8, layout: Layout) {
-deallocate(&mut self.first, ptr as usize, layout.size())
+pub unsafe fn deallocate(&mut self, ptr: NonNull<Opaque>, layout: Layout) {
+deallocate(&mut self.first, ptr.as_ptr() as usize, layout.size())
}

/// Returns the minimal allocation size. Smaller allocations or deallocations are not allowed.
@@ -199,7 +200,7 @@ fn allocate_first_fit(mut previous: &mut Hole, layout: Layout) -> Result<Allocation, AllocErr> {
}
None => {
// this was the last hole, so no hole is big enough -> allocation not possible
-return Err(AllocErr::Exhausted { request: layout });
+return Err(AllocErr);
}
}
}
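
In hole.rs every public entry point changes shape: allocations now come back as `NonNull<Opaque>` instead of `*mut u8`, and failure is the new unit struct `AllocErr`, which no longer carries the requested `Layout`. A minimal call-site sketch against the nightly API this PR targets; the `holes` argument is assumed to be an initialized `HoleList` (in the real crate it lives inside `Heap`):

```rust
use core::alloc::{Layout, Opaque};

// Hedged sketch, not part of the PR: exercises the new HoleList signatures.
fn use_hole_list(holes: &mut HoleList) {
    // 16 bytes keeps the request above HoleList::min_size() on 64-bit targets.
    let layout = Layout::from_size_align(16, 8).unwrap();
    match holes.allocate_first_fit(layout.clone()) {
        Ok(ptr) => unsafe {
            // Success is a NonNull<Opaque>; cast through as_ptr() for byte access.
            (ptr.as_ptr() as *mut u8).write(0xAB);
            // Deallocation takes the same NonNull<Opaque> back.
            holes.deallocate(ptr, layout);
        },
        // AllocErr is now a unit struct with no payload to inspect.
        Err(_) => { /* out of memory */ }
    }
}
```
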
31 changes: 16 additions & 15 deletions src/lib.rs
@@ -3,8 +3,6 @@
#![feature(pointer_methods)]
#![no_std]

-extern crate alloc;
-
#[cfg(test)]
#[macro_use]
extern crate std;
@@ -14,9 +12,10 @@ extern crate spin;

use hole::{Hole, HoleList};
use core::mem;
+use core::ptr::NonNull;
#[cfg(feature = "use_spin")]
use core::ops::Deref;
-use alloc::allocator::{Alloc, Layout, AllocErr};
+use core::alloc::{Alloc, GlobalAlloc, AllocErr, Layout, Opaque};
#[cfg(feature = "use_spin")]
use spin::Mutex;

@@ -70,7 +69,7 @@ impl Heap {
/// This function scans the list of free memory blocks and uses the first block that is big
/// enough. The runtime is in O(n) where n is the number of free blocks, but it should be
/// reasonably fast for small allocations.
-pub fn allocate_first_fit(&mut self, layout: Layout) -> Result<*mut u8, AllocErr> {
+pub fn allocate_first_fit(&mut self, layout: Layout) -> Result<NonNull<Opaque>, AllocErr> {
let mut size = layout.size();
if size < HoleList::min_size() {
size = HoleList::min_size();
@@ -88,7 +87,7 @@ impl Heap {
/// This function walks the list of free memory blocks and inserts the freed block at the
/// correct place. If the freed block is adjacent to another free block, the blocks are merged
/// again. This operation is in `O(n)` since the list needs to be sorted by address.
-pub unsafe fn deallocate(&mut self, ptr: *mut u8, layout: Layout) {
+pub unsafe fn deallocate(&mut self, ptr: NonNull<Opaque>, layout: Layout) {
let mut size = layout.size();
if size < HoleList::min_size() {
size = HoleList::min_size();
@@ -122,21 +121,21 @@ impl Heap {
pub unsafe fn extend(&mut self, by: usize) {
let top = self.top();
let layout = Layout::from_size_align(by, 1).unwrap();
-self.holes.deallocate(top as *mut u8, layout);
+self.holes.deallocate(NonNull::new_unchecked(top as *mut Opaque), layout);
self.size += by;
}
}

unsafe impl Alloc for Heap {
-unsafe fn alloc(&mut self, layout: Layout) -> Result<*mut u8, AllocErr> {
+unsafe fn alloc(&mut self, layout: Layout) -> Result<NonNull<Opaque>, AllocErr> {
self.allocate_first_fit(layout)
}

-unsafe fn dealloc(&mut self, ptr: *mut u8, layout: Layout) {
+unsafe fn dealloc(&mut self, ptr: NonNull<Opaque>, layout: Layout) {
self.deallocate(ptr, layout)
}

-fn oom(&mut self, _: AllocErr) -> ! {
+fn oom(&mut self) -> ! {
panic!("Out of memory");
}
}
@@ -174,16 +173,18 @@ impl Deref for LockedHeap {
}

#[cfg(feature = "use_spin")]
-unsafe impl<'a> Alloc for &'a LockedHeap {
-unsafe fn alloc(&mut self, layout: Layout) -> Result<*mut u8, AllocErr> {
-self.0.lock().allocate_first_fit(layout)
+unsafe impl GlobalAlloc for LockedHeap {
+unsafe fn alloc(&self, layout: Layout) -> *mut Opaque {
+self.0.lock().allocate_first_fit(layout).ok().map_or(0 as *mut Opaque, |allocation| {
+allocation.as_ptr()
+})
}

-unsafe fn dealloc(&mut self, ptr: *mut u8, layout: Layout) {
-self.0.lock().deallocate(ptr, layout)
+unsafe fn dealloc(&self, ptr: *mut Opaque, layout: Layout) {
+self.0.lock().deallocate(NonNull::new_unchecked(ptr), layout)
}

-fn oom(&mut self, _: AllocErr) -> ! {
+fn oom(&self) -> ! {
panic!("Out of memory");
}
}
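
Moving from `Alloc for &'a LockedHeap` to `GlobalAlloc for LockedHeap` is the core of the PR: `GlobalAlloc` takes `&self`, so the locked wrapper can be registered as the program-wide allocator on nightly. Note that `alloc` converts the inner `Result` into a nullable `*mut Opaque` via `map_or`; under `GlobalAlloc`, failure is signalled by a null pointer rather than an `Err`. A sketch of the intended downstream usage, assuming the crate's existing `LockedHeap::empty()` constructor and `Heap::init`; the heap bounds are placeholder values:

```rust
#![feature(global_allocator)]

extern crate linked_list_allocator;
use linked_list_allocator::LockedHeap;

// GlobalAlloc is implemented for LockedHeap itself, so this now compiles.
#[global_allocator]
static ALLOCATOR: LockedHeap = LockedHeap::empty();

fn init_heap() {
    // Placeholder bounds; a kernel would hand over a real reserved region.
    let heap_start: usize = 0x4444_0000;
    let heap_size: usize = 100 * 1024;
    unsafe {
        // LockedHeap derefs to a spin::Mutex<Heap>.
        ALLOCATOR.lock().init(heap_start, heap_size);
    }
}
```
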
32 changes: 16 additions & 16 deletions src/test.rs
@@ -1,6 +1,6 @@
use std::prelude::v1::*;
use std::mem::{size_of, align_of};
-use alloc::allocator::Layout;
+use core::alloc::Layout;
use super::*;

fn new_heap() -> Heap {
@@ -46,7 +46,7 @@ fn allocate_double_usize() {
let layout = Layout::from_size_align(size, align_of::<usize>());
let addr = heap.allocate_first_fit(layout.unwrap());
assert!(addr.is_ok());
-let addr = addr.unwrap() as usize;
+let addr = addr.unwrap().as_ptr() as usize;
assert!(addr == heap.bottom);
let (hole_addr, hole_size) = heap.holes.first_hole().expect("ERROR: no hole left");
assert!(hole_addr == heap.bottom + size);
@@ -64,7 +64,7 @@ fn allocate_and_free_double_usize() {
let layout = Layout::from_size_align(size_of::<usize>() * 2, align_of::<usize>()).unwrap();
let x = heap.allocate_first_fit(layout.clone()).unwrap();
unsafe {
-*(x as *mut (usize, usize)) = (0xdeafdeadbeafbabe, 0xdeafdeadbeafbabe);
+*(x.as_ptr() as *mut (usize, usize)) = (0xdeafdeadbeafbabe, 0xdeafdeadbeafbabe);

heap.deallocate(x, layout.clone());
assert_eq!((*(heap.bottom as *const Hole)).size, heap.size);
@@ -83,11 +83,11 @@ fn deallocate_right_before() {

unsafe {
heap.deallocate(y, layout.clone());
-assert_eq!((*(y as *const Hole)).size, layout.size());
+assert_eq!((*(y.as_ptr() as *const Hole)).size, layout.size());
heap.deallocate(x, layout.clone());
-assert_eq!((*(x as *const Hole)).size, layout.size() * 2);
+assert_eq!((*(x.as_ptr() as *const Hole)).size, layout.size() * 2);
heap.deallocate(z, layout.clone());
-assert_eq!((*(x as *const Hole)).size, heap.size);
+assert_eq!((*(x.as_ptr() as *const Hole)).size, heap.size);
}
}

@@ -103,11 +103,11 @@ fn deallocate_right_behind() {

unsafe {
heap.deallocate(x, layout.clone());
-assert_eq!((*(x as *const Hole)).size, size);
+assert_eq!((*(x.as_ptr() as *const Hole)).size, size);
heap.deallocate(y, layout.clone());
-assert_eq!((*(x as *const Hole)).size, size * 2);
+assert_eq!((*(x.as_ptr() as *const Hole)).size, size * 2);
heap.deallocate(z, layout.clone());
-assert_eq!((*(x as *const Hole)).size, heap.size);
+assert_eq!((*(x.as_ptr() as *const Hole)).size, heap.size);
}
}

@@ -124,14 +124,14 @@ fn deallocate_middle() {

unsafe {
heap.deallocate(x, layout.clone());
-assert_eq!((*(x as *const Hole)).size, size);
+assert_eq!((*(x.as_ptr() as *const Hole)).size, size);
heap.deallocate(z, layout.clone());
-assert_eq!((*(x as *const Hole)).size, size);
-assert_eq!((*(z as *const Hole)).size, size);
+assert_eq!((*(x.as_ptr() as *const Hole)).size, size);
+assert_eq!((*(z.as_ptr() as *const Hole)).size, size);
heap.deallocate(y, layout.clone());
-assert_eq!((*(x as *const Hole)).size, size * 3);
+assert_eq!((*(x.as_ptr() as *const Hole)).size, size * 3);
heap.deallocate(a, layout.clone());
-assert_eq!((*(x as *const Hole)).size, heap.size);
+assert_eq!((*(x.as_ptr() as *const Hole)).size, heap.size);
}
}

@@ -167,9 +167,9 @@ fn allocate_multiple_sizes() {

let x = heap.allocate_first_fit(layout_1.clone()).unwrap();
let y = heap.allocate_first_fit(layout_2.clone()).unwrap();
-assert_eq!(y as usize, x as usize + base_size * 2);
+assert_eq!(y.as_ptr() as usize, x.as_ptr() as usize + base_size * 2);
let z = heap.allocate_first_fit(layout_3.clone()).unwrap();
-assert_eq!(z as usize % (base_size * 4), 0);
+assert_eq!(z.as_ptr() as usize % (base_size * 4), 0);

unsafe {
heap.deallocate(x, layout_1.clone());
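
The test changes are mechanical: every value returned by `allocate_first_fit` is now a `NonNull<Opaque>`, so each address comparison or `Hole` inspection goes through `as_ptr()` first. The recurring pattern, condensed into one hedged sketch (it assumes, like the tests above, a fresh heap whose first allocation lands at `heap.bottom`):

```rust
use core::alloc::Layout;
use std::mem::{align_of, size_of};

fn recurring_pattern() {
    let mut heap = new_heap(); // test helper defined at the top of this file
    let layout = Layout::from_size_align(size_of::<usize>() * 2, align_of::<usize>()).unwrap();

    let x = heap.allocate_first_fit(layout.clone()).unwrap();
    // x is a NonNull<Opaque>: convert before any cast or address arithmetic.
    assert_eq!(x.as_ptr() as usize, heap.bottom);

    unsafe {
        heap.deallocate(x, layout.clone());
        // After freeing the only allocation, one hole spans the whole heap.
        assert_eq!((*(x.as_ptr() as *const Hole)).size, heap.size);
    }
}
```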