Skip to content

Commit d11cd46

Browse files
Add TypedArena::alloc_slice.
1 parent 6572a46 commit d11cd46

File tree

1 file changed

+39
-6
lines changed

1 file changed

+39
-6
lines changed

Diff for: src/libarena/lib.rs

+39-6
Original file line numberDiff line numberDiff line change
@@ -46,6 +46,7 @@ use std::intrinsics;
4646
use std::marker::{PhantomData, Send};
4747
use std::mem;
4848
use std::ptr;
49+
use std::slice;
4950

5051
use alloc::heap;
5152
use alloc::raw_vec::RawVec;
@@ -133,7 +134,7 @@ impl<T> TypedArena<T> {
133134
#[inline]
134135
pub fn alloc(&self, object: T) -> &mut T {
135136
if self.ptr == self.end {
136-
self.grow()
137+
self.grow(1)
137138
}
138139

139140
unsafe {
@@ -154,24 +155,56 @@ impl<T> TypedArena<T> {
154155
}
155156
}
156157

158+
/// Allocates a slice of objects that are copied into the `TypedArena`, returning a mutable
159+
/// reference to it. Will panic if passed a zero-sized type.
160+
#[inline]
161+
pub fn alloc_slice(&self, slice: &[T]) -> &mut [T]
162+
where T: Copy {
163+
assert!(mem::size_of::<T>() != 0);
164+
if slice.len() == 0 {
165+
return unsafe { slice::from_raw_parts_mut(heap::EMPTY as *mut T, 0) };
166+
}
167+
168+
let available_capacity_bytes = self.end.get() as usize - self.ptr.get() as usize;
169+
let at_least_bytes = slice.len() * mem::size_of::<T>();
170+
if available_capacity_bytes < at_least_bytes {
171+
self.grow(slice.len());
172+
}
173+
174+
unsafe {
175+
let start_ptr = self.ptr.get();
176+
let arena_slice = slice::from_raw_parts_mut(start_ptr, slice.len());
177+
self.ptr.set(start_ptr.offset(arena_slice.len() as isize));
178+
arena_slice.copy_from_slice(slice);
179+
arena_slice
180+
}
181+
}
182+
157183
/// Grows the arena.
158184
#[inline(never)]
159185
#[cold]
160-
fn grow(&self) {
186+
fn grow(&self, n: usize) {
161187
unsafe {
162188
let mut chunks = self.chunks.borrow_mut();
163-
let (chunk, new_capacity);
189+
let (chunk, mut new_capacity);
164190
if let Some(last_chunk) = chunks.last_mut() {
165-
if last_chunk.storage.double_in_place() {
191+
let used_bytes = self.ptr.get() as usize - last_chunk.start() as usize;
192+
let currently_used_cap = used_bytes / mem::size_of::<T>();
193+
if last_chunk.storage.reserve_in_place(currently_used_cap, n) {
166194
self.end.set(last_chunk.end());
167195
return;
168196
} else {
169197
let prev_capacity = last_chunk.storage.cap();
170-
new_capacity = prev_capacity.checked_mul(2).unwrap();
198+
loop {
199+
new_capacity = prev_capacity.checked_mul(2).unwrap();
200+
if new_capacity >= currently_used_cap + n {
201+
break;
202+
}
203+
}
171204
}
172205
} else {
173206
let elem_size = cmp::max(1, mem::size_of::<T>());
174-
new_capacity = cmp::max(1, PAGE / elem_size);
207+
new_capacity = cmp::max(n, PAGE / elem_size);
175208
}
176209
chunk = TypedArenaChunk::<T>::new(new_capacity);
177210
self.ptr.set(chunk.start());

0 commit comments

Comments
 (0)