[WIP] Promote types which allocate to an arena to avoid the use of Lrc #56382

Closed
wants to merge 50 commits into from

Changes from all commits

Commits (50)
f94330e
Add a trait to include allocations to an arena
Zoxc Nov 30, 2018
b39522e
Update inferred_outlives_of
Zoxc Nov 30, 2018
03464a7
Update variances_of
Zoxc Nov 30, 2018
aa48e00
Update associated_item_def_ids
Zoxc Nov 30, 2018
45ee7b9
Add DeferDeallocs impls for HashMap and HashSet
Zoxc Nov 30, 2018
be2f016
Update inherent_impls
Zoxc Nov 30, 2018
5487767
Update mir_keys
Zoxc Nov 30, 2018
8b3c315
Move DeferredDeallocs to rustc_data_structures
Zoxc Nov 30, 2018
233fa8a
Update region_scope_tree
Zoxc Nov 30, 2018
a43f967
Update rvalue_promotable_map
Zoxc Nov 30, 2018
64c7588
Update vtable_methods
Zoxc Nov 30, 2018
a4d9d53
Add a Bx type which represents a box in an arena
Zoxc Nov 30, 2018
9bfb08b
Update mir_const_qualif
Zoxc Nov 30, 2018
7dcb7b2
Update used_trait_imports
Zoxc Nov 30, 2018
7a75ef3
Update borrowck
Zoxc Nov 30, 2018
b254707
Update trait_impls_of
Zoxc Nov 30, 2018
2c0bcac
Update specialization_graph_of
Zoxc Nov 30, 2018
edf4787
Update lint_levels
Zoxc Nov 30, 2018
064f12d
Update upstream_monomorphizations and upstream_monomorphizations_for
Zoxc Nov 30, 2018
eac8d13
Update implementations_of_trait and all_trait_implementations
Zoxc Nov 30, 2018
96ef059
Update resolve_lifetimes, named_region_map, is_late_bound_map and obj…
Zoxc Nov 30, 2018
5159542
Update item_children
Zoxc Nov 30, 2018
0701f5d
Update stability_index, all_crate_nums and features_query
Zoxc Nov 30, 2018
c869c62
Update all_traits
Zoxc Nov 30, 2018
6d47856
Update trait queries
Zoxc Nov 30, 2018
6b44803
fix
Zoxc Nov 30, 2018
2dff33d
fixes
Zoxc Dec 1, 2018
94a6f05
wip
Zoxc Dec 1, 2018
c80288c
areana tweaks
Zoxc Dec 1, 2018
74287dd
wip
Zoxc Dec 1, 2018
67e3497
wip
Zoxc Dec 1, 2018
9b24ac6
tune typedarena
Zoxc Dec 1, 2018
ae4ed68
arena
Zoxc Dec 1, 2018
58cb479
bx fix
Zoxc Dec 1, 2018
88fbb09
debug fix
Zoxc Dec 1, 2018
96be4bc
Trait queries fix
Zoxc Dec 1, 2018
072ea6b
Update privacy_access_levels
Zoxc Dec 1, 2018
d8f1bcd
Update in_scope_traits_map
Zoxc Dec 1, 2018
43d892c
Update freevars and module_exports
Zoxc Dec 1, 2018
6eedc3c
Update wasm_import_module_map, target_features_whitelist_node and mod…
Zoxc Dec 1, 2018
4814874
Update get_lib_features, defined_lib_features, get_lang_items, define…
Zoxc Dec 1, 2018
05e8378
Update visible_parent_map
Zoxc Dec 1, 2018
e21587c
Update foreign_modules and dllimport_foreign_items
Zoxc Dec 1, 2018
13d6e88
Update dylib_dependency_formats, extern_crate and reachable_non_generics
Zoxc Dec 1, 2018
8d8d6a5
Fix foreign_modules
Zoxc Dec 1, 2018
50bae31
fix reachable_non_generics
Zoxc Dec 1, 2018
970faca
Update inferred_outlives_crate
Zoxc Dec 1, 2018
d03c5b9
Update crate_variances
Zoxc Dec 1, 2018
474fecc
Update GenericPredicates queries
Zoxc Dec 1, 2018
648239f
fix bx
Zoxc Dec 1, 2018
242 changes: 201 additions & 41 deletions src/libarena/lib.rs
@@ -35,7 +35,8 @@
extern crate alloc;
extern crate rustc_data_structures;

use rustc_data_structures::sync::MTLock;
use rustc_data_structures::defer_deallocs::DeferDeallocs;
use rustc_data_structures::sync::{MTLock, WorkerLocal};

use std::cell::{Cell, RefCell};
use std::cmp;
@@ -44,7 +45,6 @@ use std::marker::{PhantomData, Send};
use std::mem;
use std::ptr;
use std::slice;

use alloc::raw_vec::RawVec;

/// An arena that can hold objects of only one type.
@@ -132,30 +132,54 @@ impl<T> TypedArena<T> {
/// Allocates an object in the `TypedArena`, returning a reference to it.
#[inline]
pub fn alloc(&self, object: T) -> &mut T {
if self.ptr == self.end {
self.grow(1)
}
// Zero sized path
if mem::size_of::<T>() == 0 {
if self.ptr == self.end {
self.grow(1)
}

unsafe {
if mem::size_of::<T>() == 0 {
unsafe {
self.ptr
.set(intrinsics::arith_offset(self.ptr.get() as *mut u8, 1)
as *mut T);
let ptr = mem::align_of::<T>() as *mut T;
// Don't drop the object. This `write` is equivalent to `forget`.
ptr::write(ptr, object);
&mut *ptr
return &mut *ptr;
}
}

let ptr = self.ptr.get();

unsafe {
if std::intrinsics::unlikely(ptr == self.end.get()) {
self.grow_and_alloc(object)
} else {
let ptr = self.ptr.get();
// Advance the pointer.
self.ptr.set(self.ptr.get().offset(1));
// Write into uninitialized memory.
ptr::write(ptr, object);
&mut *ptr
self.alloc_unchecked(ptr, object)
}
}
}

#[inline(always)]
unsafe fn alloc_unchecked(&self, ptr: *mut T, object: T) -> &mut T {
// Advance the pointer.
self.ptr.set(ptr.offset(1));
// Write into uninitialized memory.
ptr::write(ptr, object);
&mut *ptr
}

#[inline(never)]
#[cold]
fn grow_and_alloc(&self, object: T) -> &mut T {
// We move the object into this function so that, if it has a destructor,
// the fast path need not have an unwind handler to destroy it
self.grow(1);
unsafe {
self.alloc_unchecked(self.ptr.get(), object)
}
}
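// Illustrative note, not part of this diff: with this split, the only call on
// the fast path of `alloc` that can unwind is `grow_and_alloc`, and it takes
// `object` by value, so the inlined fast path needs no landing-pad code to
// drop `object` on unwind.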

/// Allocates a slice of objects that are copied into the `TypedArena`, returning a mutable
/// reference to it. Will panic if passed a zero-sized type.
///
@@ -174,7 +198,7 @@ impl<T> TypedArena<T> {
let available_capacity_bytes = self.end.get() as usize - self.ptr.get() as usize;
let at_least_bytes = slice.len() * mem::size_of::<T>();
if available_capacity_bytes < at_least_bytes {
self.grow(slice.len());
self.grow_slice(slice.len());
}

unsafe {
@@ -186,9 +210,14 @@ impl<T> TypedArena<T> {
}
}

/// Grows the arena.
#[inline(never)]
#[cold]
fn grow_slice(&self, n: usize) {
self.grow(n)
}

/// Grows the arena.
#[inline(always)]
fn grow(&self, n: usize) {
unsafe {
let mut chunks = self.chunks.borrow_mut();
@@ -283,6 +312,22 @@ unsafe impl<#[may_dangle] T> Drop for TypedArena<T> {

unsafe impl<T: Send> Send for TypedArena<T> {}

type BackingType = usize;
const BLOCK_SIZE: usize = std::mem::size_of::<BackingType>();

#[inline(always)]
fn required_backing_types(bytes: usize) -> usize {
assert!(BLOCK_SIZE.is_power_of_two());
// FIXME: This addition could overflow
(bytes + BLOCK_SIZE - 1) / BLOCK_SIZE
}

#[inline(always)]
fn align(val: usize, align: usize) -> usize {
assert!(align.is_power_of_two());
(val + align - 1) & !(align - 1)
}
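// Illustrative note, not part of this diff: with BLOCK_SIZE == 8 on a 64-bit
// target, align(13, 8) == (13 + 7) & !7 == 16 and
// required_backing_types(13) == (13 + 8 - 1) / 8 == 2, i.e. a 13-byte request
// is rounded up to two usize-sized blocks.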

pub struct DroplessArena {
/// A pointer to the next object to be allocated.
ptr: Cell<*mut u8>,
@@ -292,7 +337,42 @@ pub struct DroplessArena {
end: Cell<*mut u8>,

/// A vector of arena chunks.
chunks: RefCell<Vec<TypedArenaChunk<u8>>>,
chunks: RefCell<Vec<TypedArenaChunk<BackingType>>>,
}

#[no_mangle]
pub fn tatest1(a: &TypedArena<usize>) -> &usize {
a.alloc(64usize)
}

#[no_mangle]
pub fn atest1(a: &DroplessArena) -> &usize {
a.alloc(64usize)
}

#[no_mangle]
pub fn atest2(a: &SyncDroplessArena, b: Box<usize>) -> &Box<usize> {
a.promote(b)
}

#[no_mangle]
pub fn atest6(a: &SyncDroplessArena, b: usize) -> &usize {
a.promote(b)
}

#[no_mangle]
pub fn atest3(a: &DroplessArena) {
a.align(8);
}

#[no_mangle]
pub fn atest4(a: &DroplessArena) {
a.align(16);
}

#[no_mangle]
pub fn atest5(a: &DroplessArena) {
a.align(4);
}

unsafe impl Send for DroplessArena {}
@@ -310,7 +390,7 @@ impl Default for DroplessArena {

impl DroplessArena {
pub fn in_arena<T: ?Sized>(&self, ptr: *const T) -> bool {
let ptr = ptr as *const u8 as *mut u8;
let ptr = ptr as *const u8 as *mut BackingType;
for chunk in &*self.chunks.borrow() {
if chunk.start() <= ptr && ptr < chunk.end() {
return true;
@@ -322,62 +402,93 @@ impl DroplessArena {

#[inline]
fn align(&self, align: usize) {
// FIXME: The addition of `align` could overflow, in which case final_address
// will be 0. Do we have any guarantee that our chunk won't end up as the final
// bytes in our memory space?
let final_address = ((self.ptr.get() as usize) + align - 1) & !(align - 1);
self.ptr.set(final_address as *mut u8);
assert!(self.ptr <= self.end);

// Aligning to the block size cannot go outside our current chunk, just to its end
if align > BLOCK_SIZE {
// For larger alignments we have to check that we didn't go out of bounds
assert!(self.ptr <= self.end);
}
}

#[inline(never)]
#[cold]
fn grow(&self, needed_bytes: usize) {
unsafe {
let needed_vals = required_backing_types(needed_bytes);
let mut chunks = self.chunks.borrow_mut();
let (chunk, mut new_capacity);
if let Some(last_chunk) = chunks.last_mut() {
let used_bytes = self.ptr.get() as usize - last_chunk.start() as usize;
let used_vals = required_backing_types(used_bytes);
if last_chunk
.storage
.reserve_in_place(used_bytes, needed_bytes)
.reserve_in_place(used_vals, needed_vals)
{
self.end.set(last_chunk.end());
self.end.set(last_chunk.end() as *mut u8);
return;
} else {
new_capacity = last_chunk.storage.cap();
loop {
new_capacity = new_capacity.checked_mul(2).unwrap();
if new_capacity >= used_bytes + needed_bytes {
if new_capacity >= used_vals + needed_vals {
break;
}
}
}
} else {
new_capacity = cmp::max(needed_bytes, PAGE);
new_capacity = cmp::max(needed_vals, required_backing_types(PAGE));
}
chunk = TypedArenaChunk::<u8>::new(new_capacity);
self.ptr.set(chunk.start());
self.end.set(chunk.end());
chunk = TypedArenaChunk::<BackingType>::new(new_capacity);
self.ptr.set(chunk.start() as *mut u8);
self.end.set(chunk.end() as *mut u8);
chunks.push(chunk);
}
}

#[inline(never)]
#[cold]
fn grow_and_alloc_raw(&self, bytes: usize) -> &mut [u8] {
self.grow(bytes);
unsafe {
self.alloc_raw_unchecked(self.ptr.get(), bytes)
}
}

#[inline(always)]
unsafe fn alloc_raw_unchecked(&self, start: *mut u8, bytes: usize) -> &mut [u8] {
// Tell LLVM that `start` is aligned to BLOCK_SIZE
std::intrinsics::assume(start as usize == align(start as usize, BLOCK_SIZE));

// Set the pointer past ourselves and align it
let end = start.offset(bytes as isize) as usize;
let end = align(end, BLOCK_SIZE) as *mut u8;
self.ptr.set(end);

// Return the result
slice::from_raw_parts_mut(start, bytes)
}

#[inline]
pub fn alloc_raw(&self, bytes: usize, align: usize) -> &mut [u8] {
// FIXME: Always align to 8 bytes here? Or usize alignment
unsafe {
assert!(bytes != 0);
assert!(align <= BLOCK_SIZE);
assert!(std::mem::align_of::<BackingType>() == std::mem::size_of::<BackingType>());
// FIXME: Check that `bytes` fits in an isize

self.align(align);

let future_end = intrinsics::arith_offset(self.ptr.get(), bytes as isize);
if (future_end as *mut u8) >= self.end.get() {
self.grow(bytes);
}

// FIXME: arith_offset could overflow here.
// Find some way to guarantee this doesn't happen for small fixed size types
let ptr = self.ptr.get();
// Set the pointer past ourselves
self.ptr.set(
intrinsics::arith_offset(self.ptr.get(), bytes as isize) as *mut u8,
);
slice::from_raw_parts_mut(ptr, bytes)
let future_end = intrinsics::arith_offset(ptr, bytes as isize);
if std::intrinsics::unlikely((future_end as *mut u8) >= self.end.get()) {
self.grow_and_alloc_raw(bytes)
} else {
self.alloc_raw_unchecked(ptr, bytes)
}
}
}

@@ -452,12 +563,39 @@ impl<T> SyncTypedArena<T> {
}
}

#[derive(Default)]
struct DropType {
drop_fn: unsafe fn(*mut u8),
obj: *mut u8,
}

unsafe fn drop_for_type<T>(to_drop: *mut u8) {
std::ptr::drop_in_place(to_drop as *mut T)
}

impl Drop for DropType {
fn drop(&mut self) {
unsafe {
(self.drop_fn)(self.obj)
}
}
}

pub struct SyncDroplessArena {
// Ordered so `deferred` gets dropped before the arena
// since its destructor can reference memory in the arena
deferred: WorkerLocal<TypedArena<DropType>>,
lock: MTLock<DroplessArena>,
}

impl SyncDroplessArena {
#[inline]
pub fn new() -> Self {
SyncDroplessArena {
lock: Default::default(),
deferred: WorkerLocal::new(|_| Default::default()),
}
}

#[inline(always)]
pub fn in_arena<T: ?Sized>(&self, ptr: *const T) -> bool {
self.lock.lock().in_arena(ptr)
@@ -483,6 +621,28 @@ impl SyncDroplessArena {
// Extend the lifetime of the result since it's limited to the lock guard
unsafe { &mut *(self.lock.lock().alloc_slice(slice) as *mut [T]) }
}

#[inline]
pub fn promote<T: DeferDeallocs>(&self, object: T) -> &T {
let mem = self.alloc_raw(mem::size_of::<T>(), mem::align_of::<T>()) as *mut _ as *mut T;
let result = unsafe {
// Write into uninitialized memory.
ptr::write(mem, object);
&mut *mem
};
// Record the destructor after doing the allocation, as the allocation may panic
// and would cause `object`'s destructor to run twice if it were recorded before
self.deferred.alloc(DropType {
drop_fn: drop_for_type::<T>,
obj: result as *mut T as *mut u8,
});
result
}

#[inline(always)]
pub fn promote_vec<T: DeferDeallocs>(&self, vec: Vec<T>) -> &[T] {
&self.promote(vec)[..]
}
}

#[cfg(test)]
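
The `promote` path above relies on type-erased destructor records: each promoted value gets a `DropType` whose `drop_fn` knows how to drop the concrete type, and the `deferred` arena runs those records when the `SyncDroplessArena` is dropped. As a minimal standalone sketch of just that mechanism, using only `std` and none of the arena types in this diff (the usage in `main` is invented for illustration):

```rust
use std::ptr;

// Type-erased destructor record, mirroring the DropType added in this PR.
struct DropType {
    drop_fn: unsafe fn(*mut u8),
    obj: *mut u8,
}

// Monomorphized per T; erases T behind a plain fn pointer.
unsafe fn drop_for_type<T>(to_drop: *mut u8) {
    unsafe { ptr::drop_in_place(to_drop as *mut T) }
}

impl Drop for DropType {
    fn drop(&mut self) {
        unsafe { (self.drop_fn)(self.obj) }
    }
}

fn main() {
    // Stand-in for arena-owned memory: leak a boxed String so nothing frees
    // the allocation eagerly (an arena would release it in bulk later).
    let obj: *mut String = Box::into_raw(Box::new(String::from("promoted")));
    let record = DropType {
        drop_fn: drop_for_type::<String>,
        obj: obj as *mut u8,
    };
    // Dropping the record runs String's destructor through the erased pointer,
    // without freeing the backing allocation itself.
    drop(record);
}
```
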
2 changes: 2 additions & 0 deletions src/librustc/hir/def.rs
@@ -230,6 +230,8 @@ pub struct Export {
pub vis: ty::Visibility,
}

impl_defer_dellocs_for_no_drop_type!([] Export);

impl CtorKind {
pub fn from_ast(vdata: &ast::VariantData) -> CtorKind {
match *vdata {
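
The `DeferDeallocs` trait and the `impl_defer_dellocs_for_no_drop_type!` macro used in the hunk above live in `rustc_data_structures` and are not shown here. Purely as an illustration of the pattern (names and exact shape are assumptions, not this PR's definitions), the marker impl the macro emits for `Export` could look roughly like this:

```rust
// Hypothetical sketch only; the real trait and macro are in
// rustc_data_structures and may differ (e.g. in safety requirements).
trait DeferDeallocs {}

// Simplified stand-in for hir::def::Export.
struct Export {
    name: &'static str,
    span: (u32, u32),
}

// What the macro presumably expands to for a type without a destructor of its
// own: an empty marker impl asserting the type may be promoted into the arena.
impl DeferDeallocs for Export {}

fn main() {
    fn assert_promotable<T: DeferDeallocs>(_: &T) {}
    let e = Export { name: "std", span: (0, 3) };
    assert_promotable(&e);
    println!("{} {:?}", e.name, e.span);
}
```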