From a46761d19e751d7fd2cb154b145b22220fef4b0e Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Robert=20W=C4=99c=C5=82awski?=
Date: Mon, 16 Apr 2018 00:43:14 +0200
Subject: [PATCH 1/3] Use new GlobalAlloc API

---
 src/hole.rs | 16 ++++++------
 src/lib.rs  | 37 +++++++++++++++------------
 src/test.rs | 74 ++++++++++++++++++++++++++---------------------------
 3 files changed, 65 insertions(+), 62 deletions(-)

diff --git a/src/hole.rs b/src/hole.rs
index 30713e9..6d6d645 100644
--- a/src/hole.rs
+++ b/src/hole.rs
@@ -1,5 +1,5 @@
 use core::mem::size_of;
-use alloc::allocator::{Layout, AllocErr};
+use core::alloc::{Layout, Opaque};

 use super::align_up;

@@ -42,17 +42,17 @@ impl HoleList {
     /// block is returned.
     /// This function uses the “first fit” strategy, so it uses the first hole that is big
     /// enough. Thus the runtime is in O(n) but it should be reasonably fast for small allocations.
-    pub fn allocate_first_fit(&mut self, layout: Layout) -> Result<*mut u8, AllocErr> {
+    pub fn allocate_first_fit(&mut self, layout: Layout) -> *mut Opaque {
         assert!(layout.size() >= Self::min_size());

-        allocate_first_fit(&mut self.first, layout).map(|allocation| {
+        allocate_first_fit(&mut self.first, layout).map_or(0 as *mut Opaque, |allocation| {
             if let Some(padding) = allocation.front_padding {
                 deallocate(&mut self.first, padding.addr, padding.size);
             }
             if let Some(padding) = allocation.back_padding {
                 deallocate(&mut self.first, padding.addr, padding.size);
             }
-            allocation.info.addr as *mut u8
+            allocation.info.addr as *mut Opaque
         })
     }

@@ -62,7 +62,7 @@ impl HoleList {
     /// This function walks the list and inserts the given block at the correct place. If the freed
     /// block is adjacent to another free block, the blocks are merged again.
     /// This operation is in `O(n)` since the list needs to be sorted by address.
-    pub unsafe fn deallocate(&mut self, ptr: *mut u8, layout: Layout) {
+    pub unsafe fn deallocate(&mut self, ptr: *mut Opaque, layout: Layout) {
         deallocate(&mut self.first, ptr as usize, layout.size())
     }

@@ -182,7 +182,7 @@ fn split_hole(hole: HoleInfo, required_layout: Layout) -> Option<Allocation> {
 /// care of freeing it again.
 /// This function uses the “first fit” strategy, so it breaks as soon as a big enough hole is
 /// found (and returns it).
-fn allocate_first_fit(mut previous: &mut Hole, layout: Layout) -> Result<Allocation, AllocErr> {
+fn allocate_first_fit(mut previous: &mut Hole, layout: Layout) -> Option<Allocation> {
     loop {
         let allocation: Option<Allocation> = previous.next.as_mut().and_then(|current| {
             split_hole(current.info(), layout.clone())
@@ -191,7 +191,7 @@ fn allocate_first_fit(mut previous: &mut Hole, layout: Layout) -> Result<Allocation, AllocErr> {
                 // hole is big enough, so remove it from the list by updating the previous pointer
                 previous.next = previous.next.as_mut().unwrap().next.take();
-                return Ok(allocation);
+                return Some(allocation);
             }
             None if previous.next.is_some() => {
                 // try next hole
@@ -199,7 +199,7 @@ fn allocate_first_fit(mut previous: &mut Hole, layout: Layout) -> Result<Allocation, AllocErr> {
                 // this was the last hole, so no hole is big enough -> allocation not possible
-                return Err(AllocErr::Exhausted { request: layout });
+                return None;
             }
         }
     }
diff --git a/src/lib.rs b/src/lib.rs
index 9ddba46..cff78f2 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -16,7 +16,7 @@ use hole::{Hole, HoleList};
 use core::mem;
 #[cfg(feature = "use_spin")]
 use core::ops::Deref;
-use alloc::allocator::{Alloc, Layout, AllocErr};
+use core::alloc::{GlobalAlloc, Layout, Opaque};
 #[cfg(feature = "use_spin")]
 use spin::Mutex;

@@ -24,6 +24,11 @@ mod hole;
 #[cfg(test)]
 mod test;

+#[cfg(feature = "use_spin")]
+pub static mut LOCKED_ALLOCATOR: LockedHeap = LockedHeap::empty();
+
+pub static mut ALLOCATOR: Heap = Heap::empty();
+
 /// A fixed size heap backed by a linked list of free memory blocks.
 pub struct Heap {
     bottom: usize,
@@ -70,7 +75,7 @@ impl Heap {
     /// This function scans the list of free memory blocks and uses the first block that is big
     /// enough. The runtime is in O(n) where n is the number of free blocks, but it should be
     /// reasonably fast for small allocations.
-    pub fn allocate_first_fit(&mut self, layout: Layout) -> Result<*mut u8, AllocErr> {
+    pub fn allocate_first_fit(&mut self, layout: Layout) -> *mut Opaque {
         let mut size = layout.size();
         if size < HoleList::min_size() {
             size = HoleList::min_size();
@@ -88,7 +93,7 @@ impl Heap {
     /// This function walks the list of free memory blocks and inserts the freed block at the
     /// correct place. If the freed block is adjacent to another free block, the blocks are merged
     /// again. This operation is in `O(n)` since the list needs to be sorted by address.
-    pub unsafe fn deallocate(&mut self, ptr: *mut u8, layout: Layout) {
+    pub unsafe fn deallocate(&mut self, ptr: *mut Opaque, layout: Layout) {
         let mut size = layout.size();
         if size < HoleList::min_size() {
             size = HoleList::min_size();
@@ -122,21 +127,21 @@ impl Heap {
     pub unsafe fn extend(&mut self, by: usize) {
         let top = self.top();
         let layout = Layout::from_size_align(by, 1).unwrap();
-        self.holes.deallocate(top as *mut u8, layout);
+        self.holes.deallocate(top as *mut Opaque, layout);
         self.size += by;
     }
 }

-unsafe impl Alloc for Heap {
-    unsafe fn alloc(&mut self, layout: Layout) -> Result<*mut u8, AllocErr> {
-        self.allocate_first_fit(layout)
+unsafe impl GlobalAlloc for Heap {
+    unsafe fn alloc(&self, layout: Layout) -> *mut Opaque {
+        ALLOCATOR.allocate_first_fit(layout)
     }

-    unsafe fn dealloc(&mut self, ptr: *mut u8, layout: Layout) {
-        self.deallocate(ptr, layout)
+    unsafe fn dealloc(&self, ptr: *mut Opaque, layout: Layout) {
+        ALLOCATOR.deallocate(ptr, layout)
     }

-    fn oom(&mut self, _: AllocErr) -> ! {
+    fn oom(&self) -> ! {
         panic!("Out of memory");
     }
 }

@@ -174,16 +179,16 @@ impl Deref for LockedHeap {
 }

 #[cfg(feature = "use_spin")]
-unsafe impl<'a> Alloc for &'a LockedHeap {
-    unsafe fn alloc(&mut self, layout: Layout) -> Result<*mut u8, AllocErr> {
-        self.0.lock().allocate_first_fit(layout)
+unsafe impl<'a> GlobalAlloc for &'a LockedHeap {
+    unsafe fn alloc(&self, layout: Layout) -> *mut Opaque {
+        LOCKED_ALLOCATOR.0.lock().allocate_first_fit(layout)
     }

-    unsafe fn dealloc(&mut self, ptr: *mut u8, layout: Layout) {
-        self.0.lock().deallocate(ptr, layout)
+    unsafe fn dealloc(&self, ptr: *mut Opaque, layout: Layout) {
+        LOCKED_ALLOCATOR.0.lock().deallocate(ptr, layout)
     }

-    fn oom(&mut self, _: AllocErr) -> ! {
+    fn oom(&self) -> ! {
         panic!("Out of memory");
     }
 }
diff --git a/src/test.rs b/src/test.rs
index f60e85a..265ec1d 100644
--- a/src/test.rs
+++ b/src/test.rs
@@ -28,7 +28,7 @@ fn new_max_heap() -> Heap {
 fn empty() {
     let mut heap = Heap::empty();
     let layout = Layout::from_size_align(1, 1).unwrap();
-    assert!(heap.allocate_first_fit(layout.clone()).is_err());
+    assert!(heap.allocate_first_fit(layout.clone()) as *mut u8 == core::ptr::null_mut());
 }

 #[test]
@@ -36,7 +36,7 @@ fn oom() {
     let mut heap = new_heap();
     let layout = Layout::from_size_align(heap.size() + 1, align_of::<usize>());
     let addr = heap.allocate_first_fit(layout.unwrap());
-    assert!(addr.is_err());
+    assert!(addr as *mut u8 == core::ptr::null_mut());
 }

 #[test]
@@ -45,15 +45,14 @@ fn allocate_double_usize() {
     let size = size_of::<usize>() * 2;
     let layout = Layout::from_size_align(size, align_of::<usize>());
     let addr = heap.allocate_first_fit(layout.unwrap());
-    assert!(addr.is_ok());
-    let addr = addr.unwrap() as usize;
-    assert!(addr == heap.bottom);
+    assert!(addr as *mut u8 != core::ptr::null_mut());
+    assert!(addr as usize == heap.bottom);
     let (hole_addr, hole_size) = heap.holes.first_hole().expect("ERROR: no hole left");
     assert!(hole_addr == heap.bottom + size);
     assert!(hole_size == heap.size - size);
     unsafe {
-        assert_eq!((*((addr + size) as *const Hole)).size, heap.size - size);
+        assert_eq!((*((addr as usize + size) as *const Hole)).size, heap.size - size);
     }
 }

@@ -62,7 +61,7 @@ fn allocate_and_free_double_usize() {
     let mut heap = new_heap();

     let layout = Layout::from_size_align(size_of::<usize>() * 2, align_of::<usize>()).unwrap();
-    let x = heap.allocate_first_fit(layout.clone()).unwrap();
+    let x = heap.allocate_first_fit(layout.clone());
     unsafe {
         *(x as *mut (usize, usize)) = (0xdeafdeadbeafbabe, 0xdeafdeadbeafbabe);

@@ -77,9 +76,9 @@ fn deallocate_right_before() {
     let mut heap = new_heap();
     let layout = Layout::from_size_align(size_of::<usize>() * 5, 1).unwrap();

-    let x = heap.allocate_first_fit(layout.clone()).unwrap();
-    let y = heap.allocate_first_fit(layout.clone()).unwrap();
-    let z = heap.allocate_first_fit(layout.clone()).unwrap();
+    let x = heap.allocate_first_fit(layout.clone());
+    let y = heap.allocate_first_fit(layout.clone());
+    let z = heap.allocate_first_fit(layout.clone());

     unsafe {
         heap.deallocate(y, layout.clone());
@@ -97,9 +96,9 @@ fn deallocate_right_behind() {
     let size = size_of::<usize>() * 5;
     let layout = Layout::from_size_align(size, 1).unwrap();

-    let x = heap.allocate_first_fit(layout.clone()).unwrap();
-    let y = heap.allocate_first_fit(layout.clone()).unwrap();
-    let z = heap.allocate_first_fit(layout.clone()).unwrap();
+    let x = heap.allocate_first_fit(layout.clone());
+    let y = heap.allocate_first_fit(layout.clone());
+    let z = heap.allocate_first_fit(layout.clone());

     unsafe {
         heap.deallocate(x, layout.clone());
@@ -117,10 +116,10 @@ fn deallocate_middle() {
     let size = size_of::<usize>() * 5;
     let layout = Layout::from_size_align(size, 1).unwrap();

-    let x = heap.allocate_first_fit(layout.clone()).unwrap();
-    let y = heap.allocate_first_fit(layout.clone()).unwrap();
-    let z = heap.allocate_first_fit(layout.clone()).unwrap();
-    let a = heap.allocate_first_fit(layout.clone()).unwrap();
+    let x = heap.allocate_first_fit(layout.clone());
+    let y = heap.allocate_first_fit(layout.clone());
+    let z = heap.allocate_first_fit(layout.clone());
+    let a = heap.allocate_first_fit(layout.clone());

     unsafe {
         heap.deallocate(x, layout.clone());
@@ -141,12 +140,12 @@ fn reallocate_double_usize() {

     let layout = Layout::from_size_align(size_of::<usize>() * 2, align_of::<usize>()).unwrap();

-    let x = heap.allocate_first_fit(layout.clone()).unwrap();
+    let x = heap.allocate_first_fit(layout.clone());
     unsafe {
         heap.deallocate(x, layout.clone());
     }

-    let y = heap.allocate_first_fit(layout.clone()).unwrap();
+    let y = heap.allocate_first_fit(layout.clone());
     unsafe {
         heap.deallocate(y, layout.clone());
     }
@@ -165,18 +164,18 @@ fn allocate_multiple_sizes() {
     let layout_3 = Layout::from_size_align(base_size * 3, base_align * 4).unwrap();
     let layout_4 = Layout::from_size_align(base_size * 4, base_align).unwrap();

-    let x = heap.allocate_first_fit(layout_1.clone()).unwrap();
-    let y = heap.allocate_first_fit(layout_2.clone()).unwrap();
+    let x = heap.allocate_first_fit(layout_1.clone());
+    let y = heap.allocate_first_fit(layout_2.clone());
     assert_eq!(y as usize, x as usize + base_size * 2);
-    let z = heap.allocate_first_fit(layout_3.clone()).unwrap();
+    let z = heap.allocate_first_fit(layout_3.clone());
     assert_eq!(z as usize % (base_size * 4), 0);

     unsafe {
         heap.deallocate(x, layout_1.clone());
     }

-    let a = heap.allocate_first_fit(layout_4.clone()).unwrap();
-    let b = heap.allocate_first_fit(layout_1.clone()).unwrap();
+    let a = heap.allocate_first_fit(layout_4.clone());
+    let b = heap.allocate_first_fit(layout_1.clone());
     assert_eq!(b, x);

     unsafe {
@@ -193,7 +192,7 @@ fn allocate_usize() {

     let layout = Layout::from_size_align(size_of::<usize>(), 1).unwrap();

-    assert!(heap.allocate_first_fit(layout.clone()).is_ok());
+    assert!(heap.allocate_first_fit(layout.clone()) as *mut u8 != core::ptr::null_mut());
 }

 #[test]
@@ -203,15 +202,14 @@ fn allocate_usize_in_bigger_block() {

     let layout_1 = Layout::from_size_align(size_of::<usize>() * 2, 1).unwrap();
     let layout_2 = Layout::from_size_align(size_of::<usize>(), 1).unwrap();

-    let x = heap.allocate_first_fit(layout_1.clone()).unwrap();
-    let y = heap.allocate_first_fit(layout_1.clone()).unwrap();
+    let x = heap.allocate_first_fit(layout_1.clone());
+    let y = heap.allocate_first_fit(layout_1.clone());
     unsafe {
         heap.deallocate(x, layout_1.clone());
     }

     let z = heap.allocate_first_fit(layout_2.clone());
-    assert!(z.is_ok());
-    let z = z.unwrap();
+    assert!(z as *mut u8 != core::ptr::null_mut());
     assert_eq!(x, z);

     unsafe {
@@ -229,9 +227,9 @@ fn align_from_small_to_big() {
     let layout_2 = Layout::from_size_align(8, 8).unwrap();

     // allocate 28 bytes so that the heap end is only 4 byte aligned
-    assert!(heap.allocate_first_fit(layout_1.clone()).is_ok());
+    assert!(heap.allocate_first_fit(layout_1.clone()) as *mut u8 != core::ptr::null_mut());
     // try to allocate a 8 byte aligned block
-    assert!(heap.allocate_first_fit(layout_2.clone()).is_ok());
+    assert!(heap.allocate_first_fit(layout_2.clone()) as *mut u8 != core::ptr::null_mut());
 }

 #[test]
@@ -244,7 +242,7 @@ fn extend_empty_heap() {

     // Try to allocate full heap after extend
     let layout = Layout::from_size_align(2048, 1).unwrap();
-    assert!(heap.allocate_first_fit(layout.clone()).is_ok());
+    assert!(heap.allocate_first_fit(layout.clone()) as *mut u8 != core::ptr::null_mut());
 }

 #[test]
@@ -254,11 +252,11 @@ fn extend_full_heap() {
     let layout = Layout::from_size_align(1024, 1).unwrap();

     // Allocate full heap, extend and allocate again to the max
-    assert!(heap.allocate_first_fit(layout.clone()).is_ok());
+    assert!(heap.allocate_first_fit(layout.clone()) as *mut u8 != core::ptr::null_mut());
     unsafe {
         heap.extend(1024);
     }
-    assert!(heap.allocate_first_fit(layout.clone()).is_ok());
+    assert!(heap.allocate_first_fit(layout.clone()) as *mut u8 != core::ptr::null_mut());
 }

 #[test]
@@ -271,12 +269,12 @@ fn extend_fragmented_heap() {

     let alloc1 = heap.allocate_first_fit(layout_1.clone());
     let alloc2 = heap.allocate_first_fit(layout_1.clone());
-    assert!(alloc1.is_ok());
-    assert!(alloc2.is_ok());
+    assert!(alloc1 as *mut u8 != core::ptr::null_mut());
+    assert!(alloc2 as *mut u8 != core::ptr::null_mut());

     unsafe {
         // Create a hole at the beginning of the heap
-        heap.deallocate(alloc1.unwrap(), layout_1.clone());
+        heap.deallocate(alloc1, layout_1.clone());
     }

     unsafe {
@@ -285,5 +283,5 @@ fn extend_fragmented_heap() {

     // We got additional 1024 bytes hole at the end of the heap
     // Try to allocate there
-    assert!(heap.allocate_first_fit(layout_2.clone()).is_ok());
+    assert!(heap.allocate_first_fit(layout_2.clone()) as *mut u8 != core::ptr::null_mut());
 }

From dba8b555369440090f67f6849d96ac12379b5735 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Robert=20W=C4=99c=C5=82awski?=
Date: Tue, 17 Apr 2018 20:21:24 +0200
Subject: [PATCH 2/3] Use Alloc for Heap and GlobalAlloc for LockedHeap

---
 src/hole.rs |  19 +++++-----
 src/lib.rs  |  31 ++++++++--------
 src/test.rs | 104 ++++++++++++++++++++++++++--------------------------
 3 files changed, 78 insertions(+), 76 deletions(-)

diff --git a/src/hole.rs b/src/hole.rs
index 6d6d645..a93b96e 100644
--- a/src/hole.rs
+++ b/src/hole.rs
@@ -1,5 +1,6 @@
+use core::ptr::NonNull;
 use core::mem::size_of;
-use core::alloc::{Layout, Opaque};
+use core::alloc::{Layout, Opaque, AllocErr};

 use super::align_up;

@@ -42,17 +43,17 @@ impl HoleList {
     /// block is returned.
     /// This function uses the “first fit” strategy, so it uses the first hole that is big
     /// enough. Thus the runtime is in O(n) but it should be reasonably fast for small allocations.
-    pub fn allocate_first_fit(&mut self, layout: Layout) -> *mut Opaque {
+    pub fn allocate_first_fit(&mut self, layout: Layout) -> Result<NonNull<Opaque>, AllocErr> {
         assert!(layout.size() >= Self::min_size());

-        allocate_first_fit(&mut self.first, layout).map_or(0 as *mut Opaque, |allocation| {
+        allocate_first_fit(&mut self.first, layout).map(|allocation| {
             if let Some(padding) = allocation.front_padding {
                 deallocate(&mut self.first, padding.addr, padding.size);
             }
             if let Some(padding) = allocation.back_padding {
                 deallocate(&mut self.first, padding.addr, padding.size);
             }
-            allocation.info.addr as *mut Opaque
+            NonNull::new(allocation.info.addr as *mut Opaque).unwrap()
         })
     }

@@ -62,8 +63,8 @@ impl HoleList {
     /// This function walks the list and inserts the given block at the correct place. If the freed
     /// block is adjacent to another free block, the blocks are merged again.
     /// This operation is in `O(n)` since the list needs to be sorted by address.
-    pub unsafe fn deallocate(&mut self, ptr: *mut Opaque, layout: Layout) {
-        deallocate(&mut self.first, ptr as usize, layout.size())
+    pub unsafe fn deallocate(&mut self, ptr: NonNull<Opaque>, layout: Layout) {
+        deallocate(&mut self.first, ptr.as_ptr() as usize, layout.size())
     }

     /// Returns the minimal allocation size. Smaller allocations or deallocations are not allowed.
@@ -182,7 +183,7 @@ fn split_hole(hole: HoleInfo, required_layout: Layout) -> Option<Allocation> {
 /// care of freeing it again.
 /// This function uses the “first fit” strategy, so it breaks as soon as a big enough hole is
 /// found (and returns it).
-fn allocate_first_fit(mut previous: &mut Hole, layout: Layout) -> Option<Allocation> {
+fn allocate_first_fit(mut previous: &mut Hole, layout: Layout) -> Result<Allocation, AllocErr> {
     loop {
         let allocation: Option<Allocation> = previous.next.as_mut().and_then(|current| {
             split_hole(current.info(), layout.clone())
@@ -191,7 +192,7 @@ fn allocate_first_fit(mut previous: &mut Hole, layout: Layout) -> Option<Allocation> {
                 // hole is big enough, so remove it from the list by updating the previous pointer
                 previous.next = previous.next.as_mut().unwrap().next.take();
-                return Some(allocation);
+                return Ok(allocation);
             }
             None if previous.next.is_some() => {
                 // try next hole
@@ -199,7 +200,7 @@ fn allocate_first_fit(mut previous: &mut Hole, layout: Layout) -> Option<Allocation> {
                 // this was the last hole, so no hole is big enough -> allocation not possible
-                return None;
+                return Err(AllocErr);
             }
         }
     }
diff --git a/src/lib.rs b/src/lib.rs
index cff78f2..54a18c1 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -3,8 +3,6 @@
 #![feature(pointer_methods)]
 #![no_std]

-extern crate alloc;
-
 #[cfg(test)]
 #[macro_use]
 extern crate std;
@@ -14,9 +12,10 @@ extern crate spin;

 use hole::{Hole, HoleList};
 use core::mem;
+use core::ptr::NonNull;
 #[cfg(feature = "use_spin")]
 use core::ops::Deref;
-use core::alloc::{GlobalAlloc, Layout, Opaque};
+use core::alloc::{Alloc, GlobalAlloc, AllocErr, Layout, Opaque};
 #[cfg(feature = "use_spin")]
 use spin::Mutex;

@@ -27,8 +26,6 @@ mod hole;
 #[cfg(test)]
 mod test;

 #[cfg(feature = "use_spin")]
 pub static mut LOCKED_ALLOCATOR: LockedHeap = LockedHeap::empty();

-pub static mut ALLOCATOR: Heap = Heap::empty();
-
 /// A fixed size heap backed by a linked list of free memory blocks.
 pub struct Heap {
     bottom: usize,
@@ -75,7 +72,7 @@ impl Heap {
     /// This function scans the list of free memory blocks and uses the first block that is big
     /// enough. The runtime is in O(n) where n is the number of free blocks, but it should be
     /// reasonably fast for small allocations.
-    pub fn allocate_first_fit(&mut self, layout: Layout) -> *mut Opaque {
+    pub fn allocate_first_fit(&mut self, layout: Layout) -> Result<NonNull<Opaque>, AllocErr> {
         let mut size = layout.size();
         if size < HoleList::min_size() {
             size = HoleList::min_size();
@@ -93,7 +90,7 @@ impl Heap {
     /// This function walks the list of free memory blocks and inserts the freed block at the
     /// correct place. If the freed block is adjacent to another free block, the blocks are merged
     /// again. This operation is in `O(n)` since the list needs to be sorted by address.
-    pub unsafe fn deallocate(&mut self, ptr: *mut Opaque, layout: Layout) {
+    pub unsafe fn deallocate(&mut self, ptr: NonNull<Opaque>, layout: Layout) {
         let mut size = layout.size();
         if size < HoleList::min_size() {
             size = HoleList::min_size();
@@ -127,21 +124,21 @@ impl Heap {
     pub unsafe fn extend(&mut self, by: usize) {
         let top = self.top();
         let layout = Layout::from_size_align(by, 1).unwrap();
-        self.holes.deallocate(top as *mut Opaque, layout);
+        self.holes.deallocate(NonNull::new_unchecked(top as *mut Opaque), layout);
         self.size += by;
     }
 }

-unsafe impl GlobalAlloc for Heap {
-    unsafe fn alloc(&self, layout: Layout) -> *mut Opaque {
-        ALLOCATOR.allocate_first_fit(layout)
+unsafe impl Alloc for Heap {
+    unsafe fn alloc(&mut self, layout: Layout) -> Result<NonNull<Opaque>, AllocErr> {
+        self.allocate_first_fit(layout)
     }

-    unsafe fn dealloc(&self, ptr: *mut Opaque, layout: Layout) {
-        ALLOCATOR.deallocate(ptr, layout)
+    unsafe fn dealloc(&mut self, ptr: NonNull<Opaque>, layout: Layout) {
+        self.deallocate(ptr, layout)
     }

-    fn oom(&self) -> ! {
+    fn oom(&mut self) -> ! {
         panic!("Out of memory");
     }
 }
@@ -181,11 +178,13 @@ impl Deref for LockedHeap {
 #[cfg(feature = "use_spin")]
 unsafe impl<'a> GlobalAlloc for &'a LockedHeap {
     unsafe fn alloc(&self, layout: Layout) -> *mut Opaque {
-        LOCKED_ALLOCATOR.0.lock().allocate_first_fit(layout)
+        LOCKED_ALLOCATOR.0.lock().allocate_first_fit(layout).ok().map_or(0 as *mut Opaque, |allocation| {
+            allocation.as_ptr()
+        })
     }

     unsafe fn dealloc(&self, ptr: *mut Opaque, layout: Layout) {
-        LOCKED_ALLOCATOR.0.lock().deallocate(ptr, layout)
+        LOCKED_ALLOCATOR.0.lock().deallocate(NonNull::new_unchecked(ptr), layout)
     }

     fn oom(&self) -> ! {
diff --git a/src/test.rs b/src/test.rs
index 265ec1d..e731725 100644
--- a/src/test.rs
+++ b/src/test.rs
@@ -1,6 +1,6 @@
 use std::prelude::v1::*;
 use std::mem::{size_of, align_of};
-use alloc::allocator::Layout;
+use core::alloc::Layout;
 use super::*;

 fn new_heap() -> Heap {
@@ -28,7 +28,7 @@ fn new_max_heap() -> Heap {
 fn empty() {
     let mut heap = Heap::empty();
     let layout = Layout::from_size_align(1, 1).unwrap();
-    assert!(heap.allocate_first_fit(layout.clone()) as *mut u8 == core::ptr::null_mut());
+    assert!(heap.allocate_first_fit(layout.clone()).is_err());
 }

 #[test]
@@ -36,7 +36,7 @@ fn oom() {
     let mut heap = new_heap();
     let layout = Layout::from_size_align(heap.size() + 1, align_of::<usize>());
     let addr = heap.allocate_first_fit(layout.unwrap());
-    assert!(addr as *mut u8 == core::ptr::null_mut());
+    assert!(addr.is_err());
 }

 #[test]
@@ -45,14 +45,15 @@ fn allocate_double_usize() {
     let size = size_of::<usize>() * 2;
     let layout = Layout::from_size_align(size, align_of::<usize>());
     let addr = heap.allocate_first_fit(layout.unwrap());
-    assert!(addr as *mut u8 != core::ptr::null_mut());
-    assert!(addr as usize == heap.bottom);
+    assert!(addr.is_ok());
+    let addr = addr.unwrap().as_ptr() as usize;
+    assert!(addr == heap.bottom);
     let (hole_addr, hole_size) = heap.holes.first_hole().expect("ERROR: no hole left");
     assert!(hole_addr == heap.bottom + size);
     assert!(hole_size == heap.size - size);
     unsafe {
-        assert_eq!((*((addr as usize + size) as *const Hole)).size, heap.size - size);
+        assert_eq!((*((addr + size) as *const Hole)).size, heap.size - size);
     }
 }

@@ -62,9 +62,9 @@ fn allocate_and_free_double_usize() {
     let mut heap = new_heap();

     let layout = Layout::from_size_align(size_of::<usize>() * 2, align_of::<usize>()).unwrap();
-    let x = heap.allocate_first_fit(layout.clone());
+    let x = heap.allocate_first_fit(layout.clone()).unwrap();
     unsafe {
-        *(x as *mut (usize, usize)) = (0xdeafdeadbeafbabe, 0xdeafdeadbeafbabe);
+        *(x.as_ptr() as *mut (usize, usize)) = (0xdeafdeadbeafbabe, 0xdeafdeadbeafbabe);

         heap.deallocate(x, layout.clone());
         assert_eq!((*(heap.bottom as *const Hole)).size, heap.size);
     }
@@ -76,17 +77,17 @@ fn deallocate_right_before() {
     let mut heap = new_heap();
     let layout = Layout::from_size_align(size_of::<usize>() * 5, 1).unwrap();

-    let x = heap.allocate_first_fit(layout.clone());
-    let y = heap.allocate_first_fit(layout.clone());
-    let z = heap.allocate_first_fit(layout.clone());
+    let x = heap.allocate_first_fit(layout.clone()).unwrap();
+    let y = heap.allocate_first_fit(layout.clone()).unwrap();
+    let z = heap.allocate_first_fit(layout.clone()).unwrap();

     unsafe {
         heap.deallocate(y, layout.clone());
-        assert_eq!((*(y as *const Hole)).size, layout.size());
+        assert_eq!((*(y.as_ptr() as *const Hole)).size, layout.size());
         heap.deallocate(x, layout.clone());
-        assert_eq!((*(x as *const Hole)).size, layout.size() * 2);
+        assert_eq!((*(x.as_ptr() as *const Hole)).size, layout.size() * 2);
         heap.deallocate(z, layout.clone());
-        assert_eq!((*(x as *const Hole)).size, heap.size);
+        assert_eq!((*(x.as_ptr() as *const Hole)).size, heap.size);
     }
 }

@@ -96,17 +97,17 @@ fn deallocate_right_behind() {
     let size = size_of::<usize>() * 5;
     let layout = Layout::from_size_align(size, 1).unwrap();

-    let x = heap.allocate_first_fit(layout.clone());
-    let y = heap.allocate_first_fit(layout.clone());
-    let z = heap.allocate_first_fit(layout.clone());
+    let x = heap.allocate_first_fit(layout.clone()).unwrap();
+    let y = heap.allocate_first_fit(layout.clone()).unwrap();
+    let z = heap.allocate_first_fit(layout.clone()).unwrap();

     unsafe {
         heap.deallocate(x, layout.clone());
-        assert_eq!((*(x as *const Hole)).size, size);
+        assert_eq!((*(x.as_ptr() as *const Hole)).size, size);
         heap.deallocate(y, layout.clone());
-        assert_eq!((*(x as *const Hole)).size, size * 2);
+        assert_eq!((*(x.as_ptr() as *const Hole)).size, size * 2);
         heap.deallocate(z, layout.clone());
-        assert_eq!((*(x as *const Hole)).size, heap.size);
+        assert_eq!((*(x.as_ptr() as *const Hole)).size, heap.size);
     }
 }

@@ -116,21 +117,21 @@ fn deallocate_middle() {
     let size = size_of::<usize>() * 5;
     let layout = Layout::from_size_align(size, 1).unwrap();

-    let x = heap.allocate_first_fit(layout.clone());
-    let y = heap.allocate_first_fit(layout.clone());
-    let z = heap.allocate_first_fit(layout.clone());
-    let a = heap.allocate_first_fit(layout.clone());
+    let x = heap.allocate_first_fit(layout.clone()).unwrap();
+    let y = heap.allocate_first_fit(layout.clone()).unwrap();
+    let z = heap.allocate_first_fit(layout.clone()).unwrap();
+    let a = heap.allocate_first_fit(layout.clone()).unwrap();

     unsafe {
         heap.deallocate(x, layout.clone());
-        assert_eq!((*(x as *const Hole)).size, size);
+        assert_eq!((*(x.as_ptr() as *const Hole)).size, size);
         heap.deallocate(z, layout.clone());
-        assert_eq!((*(x as *const Hole)).size, size);
-        assert_eq!((*(z as *const Hole)).size, size);
+        assert_eq!((*(x.as_ptr() as *const Hole)).size, size);
+        assert_eq!((*(z.as_ptr() as *const Hole)).size, size);
         heap.deallocate(y, layout.clone());
-        assert_eq!((*(x as *const Hole)).size, size * 3);
+        assert_eq!((*(x.as_ptr() as *const Hole)).size, size * 3);
         heap.deallocate(a, layout.clone());
-        assert_eq!((*(x as *const Hole)).size, heap.size);
+        assert_eq!((*(x.as_ptr() as *const Hole)).size, heap.size);
     }
 }

@@ -140,12 +141,12 @@ fn reallocate_double_usize() {

     let layout = Layout::from_size_align(size_of::<usize>() * 2, align_of::<usize>()).unwrap();

-    let x = heap.allocate_first_fit(layout.clone());
+    let x = heap.allocate_first_fit(layout.clone()).unwrap();
     unsafe {
         heap.deallocate(x, layout.clone());
     }

-    let y = heap.allocate_first_fit(layout.clone());
+    let y = heap.allocate_first_fit(layout.clone()).unwrap();
     unsafe {
         heap.deallocate(y, layout.clone());
     }
@@ -164,18 +165,18 @@ fn allocate_multiple_sizes() {
     let layout_3 = Layout::from_size_align(base_size * 3, base_align * 4).unwrap();
     let layout_4 = Layout::from_size_align(base_size * 4, base_align).unwrap();

-    let x = heap.allocate_first_fit(layout_1.clone());
-    let y = heap.allocate_first_fit(layout_2.clone());
+    let x = heap.allocate_first_fit(layout_1.clone()).unwrap();
+    let y = heap.allocate_first_fit(layout_2.clone()).unwrap();
     assert_eq!(y.as_ptr() as usize, x.as_ptr() as usize + base_size * 2);
-    let z = heap.allocate_first_fit(layout_3.clone());
+    let z = heap.allocate_first_fit(layout_3.clone()).unwrap();
     assert_eq!(z.as_ptr() as usize % (base_size * 4), 0);

     unsafe {
         heap.deallocate(x, layout_1.clone());
     }

-    let a = heap.allocate_first_fit(layout_4.clone());
-    let b = heap.allocate_first_fit(layout_1.clone());
+    let a = heap.allocate_first_fit(layout_4.clone()).unwrap();
+    let b = heap.allocate_first_fit(layout_1.clone()).unwrap();
     assert_eq!(b, x);

     unsafe {
@@ -192,7 +193,7 @@ fn allocate_usize() {

     let layout = Layout::from_size_align(size_of::<usize>(), 1).unwrap();

-    assert!(heap.allocate_first_fit(layout.clone()) as *mut u8 != core::ptr::null_mut());
+    assert!(heap.allocate_first_fit(layout.clone()).is_ok());
 }

 #[test]
@@ -202,14 +203,15 @@ fn allocate_usize_in_bigger_block() {

     let layout_1 = Layout::from_size_align(size_of::<usize>() * 2, 1).unwrap();
     let layout_2 = Layout::from_size_align(size_of::<usize>(), 1).unwrap();

-    let x = heap.allocate_first_fit(layout_1.clone());
-    let y = heap.allocate_first_fit(layout_1.clone());
+    let x = heap.allocate_first_fit(layout_1.clone()).unwrap();
+    let y = heap.allocate_first_fit(layout_1.clone()).unwrap();
     unsafe {
         heap.deallocate(x, layout_1.clone());
     }

     let z = heap.allocate_first_fit(layout_2.clone());
-    assert!(z as *mut u8 != core::ptr::null_mut());
+    assert!(z.is_ok());
+    let z = z.unwrap();
     assert_eq!(x, z);

     unsafe {
@@ -227,9 +229,9 @@ fn align_from_small_to_big() {
     let layout_2 = Layout::from_size_align(8, 8).unwrap();

     // allocate 28 bytes so that the heap end is only 4 byte aligned
-    assert!(heap.allocate_first_fit(layout_1.clone()) as *mut u8 != core::ptr::null_mut());
+    assert!(heap.allocate_first_fit(layout_1.clone()).is_ok());
     // try to allocate a 8 byte aligned block
-    assert!(heap.allocate_first_fit(layout_2.clone()) as *mut u8 != core::ptr::null_mut());
+    assert!(heap.allocate_first_fit(layout_2.clone()).is_ok());
 }

 #[test]
@@ -242,7 +244,7 @@ fn extend_empty_heap() {

     // Try to allocate full heap after extend
     let layout = Layout::from_size_align(2048, 1).unwrap();
-    assert!(heap.allocate_first_fit(layout.clone()) as *mut u8 != core::ptr::null_mut());
+    assert!(heap.allocate_first_fit(layout.clone()).is_ok());
 }

 #[test]
@@ -252,11 +254,11 @@ fn extend_full_heap() {
     let layout = Layout::from_size_align(1024, 1).unwrap();

     // Allocate full heap, extend and allocate again to the max
-    assert!(heap.allocate_first_fit(layout.clone()) as *mut u8 != core::ptr::null_mut());
+    assert!(heap.allocate_first_fit(layout.clone()).is_ok());
     unsafe {
         heap.extend(1024);
     }
-    assert!(heap.allocate_first_fit(layout.clone()) as *mut u8 != core::ptr::null_mut());
+    assert!(heap.allocate_first_fit(layout.clone()).is_ok());
 }

 #[test]
@@ -269,12 +271,12 @@ fn extend_fragmented_heap() {

     let alloc1 = heap.allocate_first_fit(layout_1.clone());
     let alloc2 = heap.allocate_first_fit(layout_1.clone());
-    assert!(alloc1 as *mut u8 != core::ptr::null_mut());
-    assert!(alloc2 as *mut u8 != core::ptr::null_mut());
+    assert!(alloc1.is_ok());
+    assert!(alloc2.is_ok());

     unsafe {
         // Create a hole at the beginning of the heap
-        heap.deallocate(alloc1, layout_1.clone());
+        heap.deallocate(alloc1.unwrap(), layout_1.clone());
     }

     unsafe {
@@ -283,5 +285,5 @@ fn extend_fragmented_heap() {

     // We got additional 1024 bytes hole at the end of the heap
     // Try to allocate there
-    assert!(heap.allocate_first_fit(layout_2.clone()) as *mut u8 != core::ptr::null_mut());
+    assert!(heap.allocate_first_fit(layout_2.clone()).is_ok());
 }

From aec3742e248b637ccf6dcf0634fa6df2b8d52e86 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Robert=20W=C4=99c=C5=82awski?=
Date: Tue, 17 Apr 2018 23:51:21 +0200
Subject: [PATCH 3/3] Removed LOCKED_ALLOCATOR static

---
 src/lib.rs | 9 +++------
 1 file changed, 3 insertions(+), 6 deletions(-)

diff --git a/src/lib.rs b/src/lib.rs
index 54a18c1..7f6967a 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -23,9 +23,6 @@ mod hole;
 #[cfg(test)]
 mod test;

-#[cfg(feature = "use_spin")]
-pub static mut LOCKED_ALLOCATOR: LockedHeap = LockedHeap::empty();
-
 /// A fixed size heap backed by a linked list of free memory blocks.
 pub struct Heap {
     bottom: usize,
@@ -176,15 +173,15 @@ impl Deref for LockedHeap {
 }

 #[cfg(feature = "use_spin")]
-unsafe impl<'a> GlobalAlloc for &'a LockedHeap {
+unsafe impl GlobalAlloc for LockedHeap {
     unsafe fn alloc(&self, layout: Layout) -> *mut Opaque {
-        LOCKED_ALLOCATOR.0.lock().allocate_first_fit(layout).ok().map_or(0 as *mut Opaque, |allocation| {
+        self.0.lock().allocate_first_fit(layout).ok().map_or(0 as *mut Opaque, |allocation| {
             allocation.as_ptr()
         })
     }

     unsafe fn dealloc(&self, ptr: *mut Opaque, layout: Layout) {
-        LOCKED_ALLOCATOR.0.lock().deallocate(NonNull::new_unchecked(ptr), layout)
+        self.0.lock().deallocate(NonNull::new_unchecked(ptr), layout)
     }

     fn oom(&self) -> ! {
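Usage note (not part of the patches): with [PATCH 3/3] applied, GlobalAlloc is implemented for LockedHeap itself rather than for &'a LockedHeap, so a downstream crate can register a static LockedHeap with #[global_allocator] and keep Alloc for direct use of Heap. A minimal sketch of that wiring, assuming the 2018-era nightly these patches build against (the exact #![feature] gates depend on the nightly in use) and a hypothetical HEAP_START/HEAP_SIZE region supplied by the embedding kernel, not by this crate:

// Sketch only: registering the LockedHeap from this series as the global allocator.
// HEAP_START and HEAP_SIZE are hypothetical values; the region must be valid,
// writable memory owned by the caller.
#![feature(global_allocator)]
#![no_std]

extern crate linked_list_allocator;

use linked_list_allocator::LockedHeap;

#[global_allocator]
static ALLOCATOR: LockedHeap = LockedHeap::empty();

const HEAP_START: usize = 0x4444_0000; // hypothetical heap bottom
const HEAP_SIZE: usize = 100 * 1024;   // hypothetical heap size in bytes

pub unsafe fn init_heap() {
    // Hand the [HEAP_START, HEAP_START + HEAP_SIZE) range to the allocator once,
    // before the first heap allocation is made. LockedHeap derefs to a spin
    // Mutex<Heap>, so lock() gives mutable access to the underlying Heap.
    ALLOCATOR.lock().init(HEAP_START, HEAP_SIZE);
}

Allocation failures then surface through the oom hook on the trait, which these patches implement as a panic.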