diff --git a/Cargo.lock b/Cargo.lock
index c071a2e11d269..b97fbe75b97c2 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -54,6 +54,7 @@ name = "arena"
 version = "0.0.0"
 dependencies = [
  "rustc_data_structures 0.0.0",
+ "smallvec 0.6.7 (registry+https://github.com/rust-lang/crates.io-index)",
 ]

 [[package]]
diff --git a/src/libarena/Cargo.toml b/src/libarena/Cargo.toml
index 82fc64ba64e33..aa1bf38b99597 100644
--- a/src/libarena/Cargo.toml
+++ b/src/libarena/Cargo.toml
@@ -11,3 +11,4 @@ crate-type = ["dylib"]

 [dependencies]
 rustc_data_structures = { path = "../librustc_data_structures" }
+smallvec = { version = "0.6.7", features = ["union", "may_dangle"] }
diff --git a/src/libarena/lib.rs b/src/libarena/lib.rs
index 8ae046c0796bc..0a5b79c36aad8 100644
--- a/src/libarena/lib.rs
+++ b/src/libarena/lib.rs
@@ -23,7 +23,9 @@

 extern crate alloc;

+use rustc_data_structures::cold_path;
 use rustc_data_structures::sync::MTLock;
+use smallvec::SmallVec;

 use std::cell::{Cell, RefCell};
 use std::cmp;
@@ -55,6 +57,8 @@ pub struct TypedArena<T> {
 struct TypedArenaChunk<T> {
     /// The raw storage for the arena chunk.
     storage: RawVec<T>,
+    /// The number of valid entries in the chunk.
+    entries: usize,
 }

 impl<T> TypedArenaChunk<T> {
@@ -62,6 +66,7 @@ impl<T> TypedArenaChunk<T> {
     unsafe fn new(capacity: usize) -> TypedArenaChunk<T> {
         TypedArenaChunk {
             storage: RawVec::with_capacity(capacity),
+            entries: 0,
         }
     }

@@ -149,6 +154,34 @@ impl<T> TypedArena<T> {
         }
     }

+    #[inline]
+    fn can_allocate(&self, len: usize) -> bool {
+        let available_capacity_bytes = self.end.get() as usize - self.ptr.get() as usize;
+        let at_least_bytes = len.checked_mul(mem::size_of::<T>()).unwrap();
+        available_capacity_bytes >= at_least_bytes
+    }
+
+    /// Ensures there's enough space in the current chunk to fit `len` objects.
+    #[inline]
+    fn ensure_capacity(&self, len: usize) {
+        if !self.can_allocate(len) {
+            self.grow(len);
+            debug_assert!(self.can_allocate(len));
+        }
+    }
+
+    #[inline]
+    unsafe fn alloc_raw_slice(&self, len: usize) -> *mut T {
+        assert!(mem::size_of::<T>() != 0);
+        assert!(len != 0);
+
+        self.ensure_capacity(len);
+
+        let start_ptr = self.ptr.get();
+        self.ptr.set(start_ptr.add(len));
+        start_ptr
+    }
+
     /// Allocates a slice of objects that are copied into the `TypedArena`, returning a mutable
     /// reference to it. Will panic if passed a zero-sized types.
     ///
@@ -161,21 +194,64 @@ impl<T> TypedArena<T> {
     where
         T: Copy,
     {
+        unsafe {
+            let len = slice.len();
+            let start_ptr = self.alloc_raw_slice(len);
+            slice.as_ptr().copy_to_nonoverlapping(start_ptr, len);
+            slice::from_raw_parts_mut(start_ptr, len)
+        }
+    }
+
+    #[inline]
+    pub fn alloc_from_iter<I: IntoIterator<Item = T>>(&self, iter: I) -> &mut [T] {
         assert!(mem::size_of::<T>() != 0);
-        assert!(slice.len() != 0);
+        let mut iter = iter.into_iter();
+        let size_hint = iter.size_hint();

-        let available_capacity_bytes = self.end.get() as usize - self.ptr.get() as usize;
-        let at_least_bytes = slice.len() * mem::size_of::<T>();
-        if available_capacity_bytes < at_least_bytes {
-            self.grow(slice.len());
-        }
+        match size_hint {
+            (min, Some(max)) if min == max => {
+                // We know the exact number of elements the iterator will produce here
+                let len = min;

-        unsafe {
-            let start_ptr = self.ptr.get();
-            let arena_slice = slice::from_raw_parts_mut(start_ptr, slice.len());
-            self.ptr.set(start_ptr.add(arena_slice.len()));
-            arena_slice.copy_from_slice(slice);
-            arena_slice
+                if len == 0 {
+                    return &mut [];
+                }
+
+                self.ensure_capacity(len);
+
+                let slice = self.ptr.get();
+
+                unsafe {
+                    let mut ptr = self.ptr.get();
+                    for _ in 0..len {
+                        // Write into uninitialized memory.
+                        ptr::write(ptr, iter.next().unwrap());
+                        // Advance the pointer.
+                        ptr = ptr.offset(1);
+                        // Update the pointer per iteration so if `iter.next()` panics
+                        // we destroy the correct amount
+                        self.ptr.set(ptr);
+                    }
+                    slice::from_raw_parts_mut(slice, len)
+                }
+            }
+            _ => {
+                cold_path(move || -> &mut [T] {
+                    let mut vec: SmallVec<[_; 8]> = iter.collect();
+                    if vec.is_empty() {
+                        return &mut [];
+                    }
+                    // Move the content to the arena by copying it and then forgetting
+                    // the content of the SmallVec
+                    unsafe {
+                        let len = vec.len();
+                        let start_ptr = self.alloc_raw_slice(len);
+                        vec.as_ptr().copy_to_nonoverlapping(start_ptr, len);
+                        vec.set_len(0);
+                        slice::from_raw_parts_mut(start_ptr, len)
+                    }
+                })
+            }
         }
     }

@@ -189,6 +265,7 @@ impl<T> TypedArena<T> {
         if let Some(last_chunk) = chunks.last_mut() {
             let used_bytes = self.ptr.get() as usize - last_chunk.start() as usize;
             let currently_used_cap = used_bytes / mem::size_of::<T>();
+            last_chunk.entries = currently_used_cap;
             if last_chunk.storage.reserve_in_place(currently_used_cap, n) {
                 self.end.set(last_chunk.end());
                 return;
@@ -222,8 +299,7 @@ impl<T> TypedArena<T> {
             let len = chunks_borrow.len();
             // If `T` is ZST, code below has no effect.
             for mut chunk in chunks_borrow.drain(..len-1) {
-                let cap = chunk.storage.cap();
-                chunk.destroy(cap);
+                chunk.destroy(chunk.entries);
             }
         }
     }
@@ -265,8 +341,7 @@ unsafe impl<#[may_dangle] T> Drop for TypedArena<T> {
             self.clear_last_chunk(&mut last_chunk);
             // The last chunk will be dropped. Destroy all other chunks.
             for chunk in chunks_borrow.iter_mut() {
-                let cap = chunk.storage.cap();
-                chunk.destroy(cap);
+                chunk.destroy(chunk.entries);
             }
         }
         // RawVec handles deallocation of `last_chunk` and `self.chunks`.
@@ -410,6 +485,54 @@ impl DroplessArena {
             arena_slice
         }
     }
+
+    #[inline]
+    pub fn alloc_from_iter<T, I: IntoIterator<Item = T>>(&self, iter: I) -> &mut [T] {
+        let mut iter = iter.into_iter();
+        assert!(mem::size_of::<T>() != 0);
+        assert!(!mem::needs_drop::<T>());
+
+        let size_hint = iter.size_hint();
+
+        match size_hint {
+            (min, Some(max)) if min == max => {
+                // We know the exact number of elements the iterator will produce here
+                let len = min;
+
+                if len == 0 {
+                    return &mut []
+                }
+                let size = len.checked_mul(mem::size_of::<T>()).unwrap();
+                let mem = self.alloc_raw(size, mem::align_of::<T>()) as *mut _ as *mut T;
+                unsafe {
+                    for i in 0..len {
+                        ptr::write(mem.offset(i as isize), iter.next().unwrap())
+                    }
+                    slice::from_raw_parts_mut(mem, len)
+                }
+            }
+            (_, _) => {
+                cold_path(move || -> &mut [T] {
+                    let mut vec: SmallVec<[_; 8]> = iter.collect();
+                    if vec.is_empty() {
+                        return &mut [];
+                    }
+                    // Move the content to the arena by copying it and then forgetting
+                    // the content of the SmallVec
+                    unsafe {
+                        let len = vec.len();
+                        let start_ptr = self.alloc_raw(
+                            len * mem::size_of::<T>(),
+                            mem::align_of::<T>()
+                        ) as *mut _ as *mut T;
+                        vec.as_ptr().copy_to_nonoverlapping(start_ptr, len);
+                        vec.set_len(0);
+                        slice::from_raw_parts_mut(start_ptr, len)
+                    }
+                })
+            }
+        }
+    }
 }

 #[derive(Default)]
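
Not part of the patch: a minimal usage sketch of the new `alloc_from_iter` entry point, assuming libarena's `TypedArena` as modified above (the `arena` crate name and its existing `Default` impl come from the current tree; the element type and values here are illustrative only).

// Illustrative sketch only; assumes the patched libarena crate is available.
extern crate arena;

use arena::TypedArena;

fn main() {
    let arena: TypedArena<u32> = TypedArena::default();

    // An exact-size iterator (size_hint min == max) takes the fast path that
    // writes each element directly into the current chunk.
    let squares = arena.alloc_from_iter((0u32..8).map(|i| i * i));
    assert_eq!(squares[3], 9);

    // An iterator without an exact size_hint falls back to the SmallVec-backed
    // cold path and is then copied into the arena.
    let odds = arena.alloc_from_iter((0u32..20).filter(|&i| i % 2 == 1));
    assert_eq!(odds.len(), 10);
}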