47 | 47 | #![no_std] |
48 | 48 | #![feature(alloc, allocator_api)] |
49 | 49 | |
| 50 | +extern crate alloc; |
50 | 51 | extern crate cortex_m; |
51 | 52 | extern crate linked_list_allocator; |
52 | | -extern crate alloc; |
53 | 53 | |
54 | 54 | use core::alloc::{GlobalAlloc, Layout, Opaque}; |
55 | 55 | use core::ptr::NonNull; |
56 | 56 | |
57 | | -use linked_list_allocator::Heap; |
58 | 57 | use cortex_m::interrupt::Mutex; |
| 58 | +use linked_list_allocator::Heap; |
59 | 59 | |
60 | 60 | pub struct CortexMHeap { |
61 | 61 | heap: Mutex<Heap>, |
62 | 62 | } |
63 | 63 | |
64 | 64 | impl CortexMHeap { |
65 | | - |
66 | 65 | /// Create a new UNINITIALIZED heap allocator |
67 | 66 | /// |
68 | 67 | /// You must initialize this heap using the |
@@ -96,19 +95,21 @@ impl CortexMHeap { |
96 | 95 | /// |
97 | 96 | /// - This function must be called exactly ONCE. |
98 | 97 | /// - `size > 0` |
99 | | - pub unsafe fn init(&self, start_addr: usize, size: usize){ |
| 98 | + pub unsafe fn init(&self, start_addr: usize, size: usize) { |
100 | 99 | self.heap.lock(|heap| heap.init(start_addr, size)); |
101 | 100 | } |
102 | 101 | } |
103 | 102 | |
104 | 103 | unsafe impl GlobalAlloc for CortexMHeap { |
105 | 104 | unsafe fn alloc(&self, layout: Layout) -> *mut Opaque { |
106 | | - self.heap.lock(|heap| { |
107 | | - heap.allocate_first_fit(layout) |
108 | | - }).ok().map_or(0 as *mut Opaque, |allocation| allocation.as_ptr()) |
| 105 | + self.heap |
| 106 | + .lock(|heap| heap.allocate_first_fit(layout)) |
| 107 | + .ok() |
| 108 | + .map_or(0 as *mut Opaque, |allocation| allocation.as_ptr()) |
109 | 109 | } |
110 | 110 | |
111 | 111 | unsafe fn dealloc(&self, ptr: *mut Opaque, layout: Layout) { |
112 | | - self.heap.lock(|heap| heap.deallocate(NonNull::new_unchecked(ptr), layout)); |
| 112 | + self.heap |
| 113 | + .lock(|heap| heap.deallocate(NonNull::new_unchecked(ptr), layout)); |
113 | 114 | } |
114 | 115 | } |
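
For context, a minimal usage sketch of the allocator this diff reformats. It assumes the crate's const constructor is `CortexMHeap::empty()` and that `cortex_m_rt::heap_start()` is used to locate the heap; both come from the surrounding crate and runtime, not from this hunk. The target-specific entry point, panic handler, and out-of-memory handler that a real `#![no_std]` binary needs are omitted here.

```rust
#![no_std]
#![feature(alloc)] // plus `global_allocator` on nightlies where the attribute is still gated

extern crate alloc;
extern crate alloc_cortex_m;
extern crate cortex_m_rt as rt;

use alloc::vec::Vec;
use alloc_cortex_m::CortexMHeap;

// The allocator lives in a static and starts UNINITIALIZED
// (assumed constructor name: `empty()`).
#[global_allocator]
static ALLOCATOR: CortexMHeap = CortexMHeap::empty();

// Called from the target's entry point (not shown here).
fn app() {
    // `init` must be called exactly once, before the first allocation.
    // `heap_start()` is the first free RAM address after .bss and .data;
    // 1024 bytes is an arbitrary heap size chosen for this sketch.
    unsafe { ALLOCATOR.init(rt::heap_start() as usize, 1024) }

    // From here on, `alloc` collections work as usual.
    let mut xs: Vec<u32> = Vec::new();
    xs.push(42);
}
```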