Commit 274f80e

Rollup merge of rust-lang#59533 - Zoxc:arena-slices, r=michaelwoerister
Support allocating iterators with arenas. Split out from rust-lang#57173. r? @michaelwoerister
2 parents: e655b91 + 59ff059
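
The change adds an `alloc_from_iter` method to both `TypedArena` and `DroplessArena`, so callers can collect an iterator directly into an arena-allocated slice instead of going through a temporary `Vec`. A minimal usage sketch (hypothetical caller code: the function name is illustrative, and it assumes the in-tree `arena` crate is on the path and that a `TypedArena` can be created via `Default`):

    use arena::TypedArena;

    fn squares_in_arena() {
        let arena: TypedArena<usize> = TypedArena::default();
        // The iterator's items are written straight into the arena; the returned
        // slice borrows from the arena rather than from a temporary buffer.
        let squares: &mut [usize] = arena.alloc_from_iter((0..10usize).map(|n| n * n));
        assert_eq!(squares[3], 9);
    }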

File tree (3 files changed: +141 -16 lines)

Cargo.lock
src/libarena/Cargo.toml
src/libarena/lib.rs


Cargo.lock (+1)

@@ -54,6 +54,7 @@ name = "arena"
 version = "0.0.0"
 dependencies = [
  "rustc_data_structures 0.0.0",
+ "smallvec 0.6.7 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]

src/libarena/Cargo.toml (+1)

@@ -11,3 +11,4 @@ crate-type = ["dylib"]
 
 [dependencies]
 rustc_data_structures = { path = "../librustc_data_structures" }
+smallvec = { version = "0.6.7", features = ["union", "may_dangle"] }

src/libarena/lib.rs (+139 -16)
@@ -23,7 +23,9 @@
 
 extern crate alloc;
 
+use rustc_data_structures::cold_path;
 use rustc_data_structures::sync::MTLock;
+use smallvec::SmallVec;
 
 use std::cell::{Cell, RefCell};
 use std::cmp;
@@ -55,13 +57,16 @@ pub struct TypedArena<T> {
 struct TypedArenaChunk<T> {
     /// The raw storage for the arena chunk.
     storage: RawVec<T>,
+    /// The number of valid entries in the chunk.
+    entries: usize,
 }
 
 impl<T> TypedArenaChunk<T> {
     #[inline]
     unsafe fn new(capacity: usize) -> TypedArenaChunk<T> {
         TypedArenaChunk {
             storage: RawVec::with_capacity(capacity),
+            entries: 0,
         }
     }
 
@@ -149,6 +154,34 @@ impl<T> TypedArena<T> {
         }
     }
 
+    #[inline]
+    fn can_allocate(&self, len: usize) -> bool {
+        let available_capacity_bytes = self.end.get() as usize - self.ptr.get() as usize;
+        let at_least_bytes = len.checked_mul(mem::size_of::<T>()).unwrap();
+        available_capacity_bytes >= at_least_bytes
+    }
+
+    /// Ensures there's enough space in the current chunk to fit `len` objects.
+    #[inline]
+    fn ensure_capacity(&self, len: usize) {
+        if !self.can_allocate(len) {
+            self.grow(len);
+            debug_assert!(self.can_allocate(len));
+        }
+    }
+
+    #[inline]
+    unsafe fn alloc_raw_slice(&self, len: usize) -> *mut T {
+        assert!(mem::size_of::<T>() != 0);
+        assert!(len != 0);
+
+        self.ensure_capacity(len);
+
+        let start_ptr = self.ptr.get();
+        self.ptr.set(start_ptr.add(len));
+        start_ptr
+    }
+
     /// Allocates a slice of objects that are copied into the `TypedArena`, returning a mutable
     /// reference to it. Will panic if passed a zero-sized types.
     ///
@@ -161,21 +194,64 @@ impl<T> TypedArena<T> {
     where
         T: Copy,
     {
+        unsafe {
+            let len = slice.len();
+            let start_ptr = self.alloc_raw_slice(len);
+            slice.as_ptr().copy_to_nonoverlapping(start_ptr, len);
+            slice::from_raw_parts_mut(start_ptr, len)
+        }
+    }
+
+    #[inline]
+    pub fn alloc_from_iter<I: IntoIterator<Item = T>>(&self, iter: I) -> &mut [T] {
         assert!(mem::size_of::<T>() != 0);
-        assert!(slice.len() != 0);
+        let mut iter = iter.into_iter();
+        let size_hint = iter.size_hint();
 
-        let available_capacity_bytes = self.end.get() as usize - self.ptr.get() as usize;
-        let at_least_bytes = slice.len() * mem::size_of::<T>();
-        if available_capacity_bytes < at_least_bytes {
-            self.grow(slice.len());
-        }
+        match size_hint {
+            (min, Some(max)) if min == max => {
+                // We know the exact number of elements the iterator will produce here
+                let len = min;
 
-        unsafe {
-            let start_ptr = self.ptr.get();
-            let arena_slice = slice::from_raw_parts_mut(start_ptr, slice.len());
-            self.ptr.set(start_ptr.add(arena_slice.len()));
-            arena_slice.copy_from_slice(slice);
-            arena_slice
+                if len == 0 {
+                    return &mut [];
+                }
+
+                self.ensure_capacity(len);
+
+                let slice = self.ptr.get();
+
+                unsafe {
+                    let mut ptr = self.ptr.get();
+                    for _ in 0..len {
+                        // Write into uninitialized memory.
+                        ptr::write(ptr, iter.next().unwrap());
+                        // Advance the pointer.
+                        ptr = ptr.offset(1);
+                        // Update the pointer per iteration so if `iter.next()` panics
+                        // we destroy the correct amount
+                        self.ptr.set(ptr);
+                    }
+                    slice::from_raw_parts_mut(slice, len)
+                }
+            }
+            _ => {
+                cold_path(move || -> &mut [T] {
+                    let mut vec: SmallVec<[_; 8]> = iter.collect();
+                    if vec.is_empty() {
+                        return &mut [];
+                    }
+                    // Move the content to the arena by copying it and then forgetting
+                    // the content of the SmallVec
+                    unsafe {
+                        let len = vec.len();
+                        let start_ptr = self.alloc_raw_slice(len);
+                        vec.as_ptr().copy_to_nonoverlapping(start_ptr, len);
+                        vec.set_len(0);
+                        slice::from_raw_parts_mut(start_ptr, len)
+                    }
+                })
+            }
         }
     }
 
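The fast path in the new `alloc_from_iter` fires only when the iterator reports an exact `size_hint` (`min == max`); anything else falls through to the `cold_path` arm, which collects into a `SmallVec` first and then copies into the arena via `alloc_raw_slice`. Note also that the fast path stores `self.ptr` back after every write, so if `iter.next()` panics the arena drops only the entries written so far. A standalone sketch of which iterators satisfy the exact-size check (names are illustrative; only the standard library is used):

    fn has_exact_size_hint<I: Iterator>(iter: &I) -> bool {
        match iter.size_hint() {
            (min, Some(max)) => min == max,
            _ => false,
        }
    }

    fn main() {
        // `map` over a range keeps the exact length, so this would take the fast path.
        assert!(has_exact_size_hint(&(0..10).map(|n| n * 2)));
        // `filter` can only report an upper bound, so this would go through the cold path.
        assert!(!has_exact_size_hint(&(0..10).filter(|n| n % 2 == 0)));
    }
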
@@ -189,6 +265,7 @@ impl<T> TypedArena<T> {
             if let Some(last_chunk) = chunks.last_mut() {
                 let used_bytes = self.ptr.get() as usize - last_chunk.start() as usize;
                 let currently_used_cap = used_bytes / mem::size_of::<T>();
+                last_chunk.entries = currently_used_cap;
                 if last_chunk.storage.reserve_in_place(currently_used_cap, n) {
                     self.end.set(last_chunk.end());
                     return;
@@ -222,8 +299,7 @@ impl<T> TypedArena<T> {
                 let len = chunks_borrow.len();
                 // If `T` is ZST, code below has no effect.
                 for mut chunk in chunks_borrow.drain(..len-1) {
-                    let cap = chunk.storage.cap();
-                    chunk.destroy(cap);
+                    chunk.destroy(chunk.entries);
                 }
             }
         }
@@ -265,8 +341,7 @@ unsafe impl<#[may_dangle] T> Drop for TypedArena<T> {
                 self.clear_last_chunk(&mut last_chunk);
                 // The last chunk will be dropped. Destroy all other chunks.
                 for chunk in chunks_borrow.iter_mut() {
-                    let cap = chunk.storage.cap();
-                    chunk.destroy(cap);
+                    chunk.destroy(chunk.entries);
                 }
             }
             // RawVec handles deallocation of `last_chunk` and `self.chunks`.
@@ -410,6 +485,54 @@ impl DroplessArena {
             arena_slice
         }
     }
+
+    #[inline]
+    pub fn alloc_from_iter<T, I: IntoIterator<Item = T>>(&self, iter: I) -> &mut [T] {
+        let mut iter = iter.into_iter();
+        assert!(mem::size_of::<T>() != 0);
+        assert!(!mem::needs_drop::<T>());
+
+        let size_hint = iter.size_hint();
+
+        match size_hint {
+            (min, Some(max)) if min == max => {
+                // We know the exact number of elements the iterator will produce here
+                let len = min;
+
+                if len == 0 {
+                    return &mut []
+                }
+                let size = len.checked_mul(mem::size_of::<T>()).unwrap();
+                let mem = self.alloc_raw(size, mem::align_of::<T>()) as *mut _ as *mut T;
+                unsafe {
+                    for i in 0..len {
+                        ptr::write(mem.offset(i as isize), iter.next().unwrap())
+                    }
+                    slice::from_raw_parts_mut(mem, len)
+                }
+            }
+            (_, _) => {
+                cold_path(move || -> &mut [T] {
+                    let mut vec: SmallVec<[_; 8]> = iter.collect();
+                    if vec.is_empty() {
+                        return &mut [];
+                    }
+                    // Move the content to the arena by copying it and then forgetting
+                    // the content of the SmallVec
+                    unsafe {
+                        let len = vec.len();
+                        let start_ptr = self.alloc_raw(
+                            len * mem::size_of::<T>(),
+                            mem::align_of::<T>()
+                        ) as *mut _ as *mut T;
+                        vec.as_ptr().copy_to_nonoverlapping(start_ptr, len);
+                        vec.set_len(0);
+                        slice::from_raw_parts_mut(start_ptr, len)
+                    }
+                })
+            }
+        }
+    }
 }
 
 #[derive(Default)]
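
Unlike the `TypedArena` version, the `DroplessArena` variant also asserts `!mem::needs_drop::<T>()`, since a dropless arena never runs destructors for the values it holds. A quick standard-library-only illustration of which element types would pass that assertion:

    use std::mem;

    fn main() {
        // Plain data with no destructor is fine for a dropless arena.
        assert!(!mem::needs_drop::<u32>());
        assert!(!mem::needs_drop::<(usize, f64)>());
        // Heap-owning types have destructors and would trip the assertion.
        assert!(mem::needs_drop::<String>());
        assert!(mem::needs_drop::<Vec<u8>>());
    }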
