diff --git a/src/liballoc/abort_adapter.rs b/src/liballoc/abort_adapter.rs
new file mode 100644
index 0000000000000..b78ab5a585fc1
--- /dev/null
+++ b/src/liballoc/abort_adapter.rs
@@ -0,0 +1,118 @@
+// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![unstable(feature = "allocator_api",
+            reason = "the precise API and guarantees it provides may be tweaked \
+                      slightly, especially to possibly take into account the \
+                      types being stored to make room for a future \
+                      tracing garbage collector",
+            issue = "32838")]
+
+use core::usize;
+use core::ptr::NonNull;
+
+use crate::alloc::*;
+
+/// An allocator adapter that blows up by calling `handle_alloc_error` on all errors.
+///
+/// On one hand, concrete allocator implementations should always be written
+/// without panicking on user error and OOM to give users maximum
+/// flexibility. On the other hand, code that depends on allocation succeeding
+/// should depend on `Alloc<Err = !>` to avoid repetitively handling errors from
+/// which it cannot recover.
+///
+/// This adapter bridges the gap, effectively allowing `Alloc<Err = !>` to be
+/// implemented by any allocator.
+#[derive(Copy, Clone, Debug, Default)]
+pub struct AbortAdapter<Alloc>(pub Alloc);
+
+impl<A: Alloc> AllocHelper for AbortAdapter<A> {
+    type Err = !;
+}
+
+unsafe impl<A: Alloc> Alloc for AbortAdapter<A> {
+    unsafe fn alloc(&mut self, layout: Layout) -> Result<NonNull<u8>, Self::Err> {
+        self.0.alloc(layout).or_else(|_| handle_alloc_error(layout))
+    }
+
+    unsafe fn dealloc(&mut self, ptr: NonNull<u8>, layout: Layout) {
+        self.0.dealloc(ptr, layout)
+    }
+
+    fn usable_size(&self, layout: &Layout) -> (usize, usize) {
+        self.0.usable_size(layout)
+    }
+
+    unsafe fn realloc(&mut self,
+                      ptr: NonNull<u8>,
+                      layout: Layout,
+                      new_size: usize) -> Result<NonNull<u8>, Self::Err> {
+        self.0.realloc(ptr, layout, new_size).or_else(|_| {
+            let layout = Layout::from_size_align_unchecked(new_size, layout.align());
+            handle_alloc_error(layout)
+        })
+    }
+
+    unsafe fn alloc_zeroed(&mut self, layout: Layout) -> Result<NonNull<u8>, Self::Err> {
+        self.0.alloc_zeroed(layout).or_else(|_| handle_alloc_error(layout))
+    }
+
+    unsafe fn alloc_excess(&mut self, layout: Layout) -> Result<Excess, Self::Err> {
+        self.0.alloc_excess(layout).or_else(|_| handle_alloc_error(layout))
+    }
+
+    unsafe fn grow_in_place(&mut self,
+                            ptr: NonNull<u8>,
+                            layout: Layout,
+                            new_size: usize) -> Result<(), CannotReallocInPlace> {
+        self.0.grow_in_place(ptr, layout, new_size)
+    }
+
+    unsafe fn shrink_in_place(&mut self,
+                              ptr: NonNull<u8>,
+                              layout: Layout,
+                              new_size: usize) -> Result<(), CannotReallocInPlace> {
+        self.0.shrink_in_place(ptr, layout, new_size)
+    }
+
+    fn alloc_one<T>(&mut self) -> Result<NonNull<T>, Self::Err>
+        where Self: Sized
+    {
+        self.0.alloc_one().or_else(|_| handle_alloc_error(Layout::new::<T>()))
+    }
+
+    unsafe fn dealloc_one<T>(&mut self, ptr: NonNull<T>)
+        where Self: Sized
+    {
+        self.0.dealloc_one(ptr)
+    }
+
+    fn alloc_array<T>(&mut self, n: usize) -> Result<NonNull<T>, Self::Err>
+        where Self: Sized
+    {
+        self.0.alloc_array(n).or_else(|_| handle_alloc_error(Layout::new::<T>()))
+    }
+
+    unsafe fn realloc_array<T>(&mut self,
+                               ptr: NonNull<T>,
+                               n_old: usize,
+                               n_new: usize) -> Result<NonNull<T>, Self::Err>
+        where Self: Sized
+    {
+        self.0.realloc_array(ptr, n_old, n_new)
+            .or_else(|_| handle_alloc_error(Layout::new::<T>()))
+    }
+
+    unsafe fn dealloc_array<T>(&mut self, ptr: NonNull<T>, n: usize) -> Result<(), Self::Err>
+        where Self: Sized
+    {
+        self.0.dealloc_array(ptr, n).or_else(|_| handle_alloc_error(Layout::new::<T>()))
+    }
+}
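For context, a minimal caller-side sketch (not part of the patch) of what the adapter buys: wrapping any fallible allocator yields one whose error type is `!`, so allocation `Result`s can be destructured irrefutably. `Box::new_in` is the API added later in this diff; the import path is assumed.

```rust
#![feature(allocator_api, never_type, exhaustive_patterns)]
use std::alloc::Global;
use alloc::abort_adapter::AbortAdapter; // hypothetical import path

fn demo() {
    // Any OOM inside the adapter calls `handle_alloc_error` and never returns,
    // so `A::Err = !` and the `Ok` pattern below is exhaustive on its own.
    let Ok(five) = Box::new_in(5, AbortAdapter(Global));
    assert_eq!(*five, 5);
}
```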
diff --git a/src/liballoc/alloc.rs b/src/liballoc/alloc.rs
index 84bd275df347c..f3eca547c8df5 100644
--- a/src/liballoc/alloc.rs
+++ b/src/liballoc/alloc.rs
@@ -40,10 +40,14 @@ extern "Rust" {
 /// This type implements the [`Alloc`] trait by forwarding calls
 /// to the allocator registered with the `#[global_allocator]` attribute
 /// if there is one, or the `std` crate’s default.
+#[cfg(not(test))]
 #[unstable(feature = "allocator_api", issue = "32838")]
 #[derive(Copy, Clone, Default, Debug)]
 pub struct Global;
 
+#[cfg(test)]
+pub use std::alloc::Global;
+
 /// Allocate memory with the global allocator.
 ///
 /// This function forwards calls to the [`GlobalAlloc::alloc`] method
@@ -116,6 +120,13 @@ pub unsafe fn alloc_zeroed(layout: Layout) -> *mut u8 {
     __rust_alloc_zeroed(layout.size(), layout.align())
 }
 
+#[cfg(not(test))]
+#[unstable(feature = "allocator_api", issue = "32838")]
+impl AllocHelper for Global {
+    type Err = AllocErr;
+}
+
+#[cfg(not(test))]
 #[unstable(feature = "allocator_api", issue = "32838")]
 unsafe impl Alloc for Global {
     #[inline]
@@ -165,14 +176,19 @@ unsafe fn exchange_malloc(size: usize, align: usize) -> *mut u8 {
 
 #[cfg_attr(not(test), lang = "box_free")]
 #[inline]
-pub(crate) unsafe fn box_free<T: ?Sized>(ptr: Unique<T>) {
+pub(crate) unsafe fn box_free<T: ?Sized, A: Alloc>(ptr: Unique<T>, mut a: A) {
+    box_free_worker(ptr, &mut a)
+}
+
+#[inline]
+pub(crate) unsafe fn box_free_worker<T: ?Sized, A: Alloc>(ptr: Unique<T>, a: &mut A) {
     let ptr = ptr.as_ptr();
     let size = size_of_val(&*ptr);
     let align = min_align_of_val(&*ptr);
     // We do not allocate for Box<T> when T is ZST, so deallocation is also not necessary.
     if size != 0 {
         let layout = Layout::from_size_align_unchecked(size, align);
-        dealloc(ptr as *mut u8, layout);
+        a.dealloc(NonNull::new_unchecked(ptr).cast(), layout);
     }
 }
diff --git a/src/liballoc/boxed.rs b/src/liballoc/boxed.rs
index 44f15981137ba..88157087d5d96 100644
--- a/src/liballoc/boxed.rs
+++ b/src/liballoc/boxed.rs
@@ -69,6 +69,8 @@ use core::ops::{CoerceUnsized, Deref, DerefMut, Generator, GeneratorState};
 use core::ptr::{self, NonNull, Unique};
 use core::task::{Context, Poll};
 
+use abort_adapter::AbortAdapter;
+use alloc::{Alloc, AllocHelper, Global, Layout};
 use raw_vec::RawVec;
 use str::from_boxed_utf8_unchecked;
 
@@ -78,7 +80,7 @@ use str::from_boxed_utf8_unchecked;
 #[lang = "owned_box"]
 #[fundamental]
 #[stable(feature = "rust1", since = "1.0.0")]
-pub struct Box<T: ?Sized>(Unique<T>);
+pub struct Box<T: ?Sized, A: Alloc = AbortAdapter<Global>>(Unique<T>, A);
 
 impl<T> Box<T> {
     /// Allocates memory on the heap and then places `x` into it.
@@ -97,6 +99,37 @@ impl<T> Box<T> {
     }
 }
 
+impl<T, A: Alloc> Box<T, A> {
+    /// Allocates memory in the given allocator and then places `x` into it.
+    ///
+    /// This doesn't actually allocate if `T` is zero-sized.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// # #![feature(allocator_api)]
+    /// use std::alloc::Global;
+    /// let five = Box::new_in(5, Global);
+    /// ```
+    #[unstable(feature = "allocator_api", issue = "32838")]
+    #[inline(always)]
+    pub fn new_in(x: T, a: A) -> Result<Box<T, A>, A::Err> {
+        let mut a = a;
+        let layout = Layout::for_value(&x);
+        let size = layout.size();
+        let ptr = if size == 0 {
+            Unique::empty()
+        } else {
+            unsafe {
+                let ptr = a.alloc(layout)?;
+                ptr::write(ptr.as_ptr() as *mut T, x);
+                ptr.cast().into()
+            }
+        };
+        Ok(Box(ptr, a))
+    }
+}
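A hedged sketch of the fallible side of the same API: with an allocator whose `Err` is a real type (here `Global`, whose `Err` is `AllocErr` per the `AllocHelper` impl above), callers propagate the error with `?` instead of aborting.

```rust
#![feature(allocator_api)]
use std::alloc::{AllocErr, Global};

// Hypothetical caller: surfaces OOM to its own caller via `?`.
fn boxed_pair(a: i32, b: i32) -> Result<(Box<i32, Global>, Box<i32, Global>), AllocErr> {
    Ok((Box::new_in(a, Global)?, Box::new_in(b, Global)?))
}
```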
 impl<T: ?Sized> Box<T> {
     /// Constructs a box from a raw pointer.
     ///
@@ -123,7 +156,35 @@ impl<T: ?Sized> Box<T> {
     #[stable(feature = "box_raw", since = "1.4.0")]
     #[inline]
     pub unsafe fn from_raw(raw: *mut T) -> Self {
-        Box(Unique::new_unchecked(raw))
+        Box(Unique::new_unchecked(raw), Default::default())
+    }
+}
+
+impl<T: ?Sized, A: Alloc> Box<T, A> {
+    /// Constructs a box from a raw pointer in the given allocator.
+    ///
+    /// This is similar to the [`Box::from_raw`] function, but assumes
+    /// the pointer was allocated with the given allocator.
+    ///
+    /// This function is unsafe because improper use may lead to
+    /// memory problems. For example, specifying the wrong allocator
+    /// may corrupt the allocator state.
+    ///
+    /// [`Box::from_raw`]: struct.Box.html#method.from_raw
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// # #![feature(allocator_api)]
+    /// use std::alloc::Global;
+    /// let x = Box::new_in(5, Global);
+    /// let ptr = Box::into_raw(x);
+    /// let x = unsafe { Box::from_raw_in(ptr, Global) };
+    /// ```
+    #[unstable(feature = "allocator_api", issue = "32838")]
+    #[inline]
+    pub unsafe fn from_raw_in(raw: *mut T, a: A) -> Self {
+        Box(Unique::new_unchecked(raw), a)
     }
 
     /// Consumes the `Box`, returning the wrapped raw pointer.
@@ -148,7 +209,7 @@ impl<T: ?Sized, A: Alloc> Box<T, A> {
     /// ```
     #[stable(feature = "box_raw", since = "1.4.0")]
     #[inline]
-    pub fn into_raw(b: Box<T>) -> *mut T {
+    pub fn into_raw(b: Self) -> *mut T {
         Box::into_raw_non_null(b).as_ptr()
     }
 
@@ -180,19 +241,34 @@ impl<T: ?Sized, A: Alloc> Box<T, A> {
     /// ```
     #[unstable(feature = "box_into_raw_non_null", issue = "47336")]
     #[inline]
-    pub fn into_raw_non_null(b: Box<T>) -> NonNull<T> {
+    pub fn into_raw_non_null(b: Box<T, A>) -> NonNull<T> {
         Box::into_unique(b).into()
     }
 
     #[unstable(feature = "ptr_internals", issue = "0", reason = "use into_raw_non_null instead")]
     #[inline]
     #[doc(hidden)]
-    pub fn into_unique(b: Box<T>) -> Unique<T> {
+    pub fn into_unique(b: Self) -> Unique<T> {
         let unique = b.0;
         mem::forget(b);
         unique
     }
+
+    #[unstable(feature = "unique", reason = "needs an RFC to flesh out design",
+               issue = "27730")]
+    #[inline]
+    pub fn into_both(mut b: Self) -> (Unique<T>, A) {
+        let unique = b.0;
+        let alloc = unsafe {
+            let mut a = mem::uninitialized();
+            mem::swap(&mut a, &mut b.1);
+            a
+        };
+        mem::forget(b);
+        (unique, alloc)
+    }
+
     /// Consumes and leaks the `Box`, returning a mutable reference,
     /// `&'a mut T`. Here, the lifetime `'a` may be chosen to be `'static`.
     ///
@@ -234,7 +310,7 @@ impl<T: ?Sized, A: Alloc> Box<T, A> {
     /// ```
     #[stable(feature = "box_leak", since = "1.26.0")]
     #[inline]
-    pub fn leak<'a>(b: Box<T>) -> &'a mut T
+    pub fn leak<'a>(b: Self) -> &'a mut T
     where
         T: 'a // Technically not needed, but kept to be explicit.
     {
@@ -242,25 +318,28 @@ impl<T: ?Sized, A: Alloc> Box<T, A> {
     }
 }
 
+
 #[stable(feature = "rust1", since = "1.0.0")]
-unsafe impl<#[may_dangle] T: ?Sized> Drop for Box<T> {
+unsafe impl<#[may_dangle] T: ?Sized, A: Alloc> Drop for Box<T, A> {
     fn drop(&mut self) {
         // FIXME: Do nothing, drop is currently performed by compiler.
     }
 }
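An illustrative round trip (assumed usage, not from the patch) showing why `into_both` exists: code that takes a box apart to retag or cast the pointer can reassemble it without losing the allocator, which `into_raw` alone would drop on the floor.

```rust
#![feature(allocator_api)]
use std::alloc::Global;

fn round_trip(b: Box<u32, Global>) -> Box<u32, Global> {
    // Split the box into its pointer and its allocator value...
    let (unique, alloc) = Box::into_both(b);
    // ...and rebuild it; the same allocator will free the value on drop.
    unsafe { Box::from_raw_in(unique.as_ptr(), alloc) }
}
```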
 #[stable(feature = "rust1", since = "1.0.0")]
-impl<T: Default> Default for Box<T> {
-    /// Creates a `Box<T>`, with the `Default` value for T.
-    fn default() -> Box<T> {
-        box Default::default()
+impl<T: Default, A: Alloc + AllocHelper<Err = !> + Default> Default for Box<T, A> {
+    /// Creates a `Box<T, A>`, with the `Default` value for T.
+    fn default() -> Box<T, A> {
+        let Ok(b) = Box::new_in(Default::default(), Default::default());
+        b
     }
 }
 
 #[stable(feature = "rust1", since = "1.0.0")]
-impl<T> Default for Box<[T]> {
-    fn default() -> Box<[T]> {
-        Box::<[T; 0]>::new([])
+impl<T, A: Alloc + AllocHelper<Err = !> + Default> Default for Box<[T], A> {
+    fn default() -> Box<[T], A> {
+        let Ok(b) = Box::<[T; 0], A>::new_in([], Default::default());
+        b
     }
 }
 
@@ -272,7 +351,7 @@ impl Default for Box<str> {
 }
 
 #[stable(feature = "rust1", since = "1.0.0")]
-impl<T: Clone> Clone for Box<T> {
+impl<T: Clone, A: Alloc + AllocHelper<Err = !> + Clone> Clone for Box<T, A> {
     /// Returns a new box with a `clone()` of this box's contents.
     ///
     /// # Examples
@@ -283,9 +362,11 @@ impl<T: Clone, A: Alloc + AllocHelper<Err = !> + Clone> Clone for Box<T, A> {
     /// ```
     #[rustfmt_skip]
     #[inline]
-    fn clone(&self) -> Box<T> {
-        box { (**self).clone() }
+    fn clone(&self) -> Self {
+        let Ok(b) = Box::new_in((**self).clone(), self.1.clone());
+        b
     }
+
     /// Copies `source`'s contents into `self` without creating a new allocation.
     ///
     /// # Examples
@@ -299,12 +380,11 @@ impl<T: Clone, A: Alloc + AllocHelper<Err = !> + Clone> Clone for Box<T, A> {
     /// assert_eq!(*y, 5);
     /// ```
     #[inline]
-    fn clone_from(&mut self, source: &Box<T>) {
+    fn clone_from(&mut self, source: &Self) {
         (**self).clone_from(&(**source));
     }
 }
 
-
 #[stable(feature = "box_slice_clone", since = "1.3.0")]
 impl Clone for Box<str> {
     fn clone(&self) -> Self {
@@ -317,59 +397,65 @@ impl Clone for Box<str> {
     }
 }
 
+/// Just the contents are compared, the allocator is ignored
 #[stable(feature = "rust1", since = "1.0.0")]
-impl<T: ?Sized + PartialEq> PartialEq for Box<T> {
+impl<T: ?Sized + PartialEq, A: Alloc> PartialEq for Box<T, A> {
     #[inline]
-    fn eq(&self, other: &Box<T>) -> bool {
+    fn eq(&self, other: &Box<T, A>) -> bool {
         PartialEq::eq(&**self, &**other)
     }
     #[inline]
-    fn ne(&self, other: &Box<T>) -> bool {
+    fn ne(&self, other: &Box<T, A>) -> bool {
         PartialEq::ne(&**self, &**other)
     }
 }
 
+/// Just the contents are compared, the allocator is ignored
 #[stable(feature = "rust1", since = "1.0.0")]
-impl<T: ?Sized + PartialOrd> PartialOrd for Box<T> {
+impl<T: ?Sized + PartialOrd, A: Alloc> PartialOrd for Box<T, A> {
     #[inline]
-    fn partial_cmp(&self, other: &Box<T>) -> Option<Ordering> {
+    fn partial_cmp(&self, other: &Box<T, A>) -> Option<Ordering> {
         PartialOrd::partial_cmp(&**self, &**other)
     }
     #[inline]
-    fn lt(&self, other: &Box<T>) -> bool {
+    fn lt(&self, other: &Box<T, A>) -> bool {
         PartialOrd::lt(&**self, &**other)
     }
     #[inline]
-    fn le(&self, other: &Box<T>) -> bool {
+    fn le(&self, other: &Box<T, A>) -> bool {
         PartialOrd::le(&**self, &**other)
     }
     #[inline]
-    fn ge(&self, other: &Box<T>) -> bool {
+    fn ge(&self, other: &Box<T, A>) -> bool {
         PartialOrd::ge(&**self, &**other)
     }
     #[inline]
-    fn gt(&self, other: &Box<T>) -> bool {
+    fn gt(&self, other: &Box<T, A>) -> bool {
         PartialOrd::gt(&**self, &**other)
     }
 }
 
+/// Just the contents are compared, the allocator is ignored
 #[stable(feature = "rust1", since = "1.0.0")]
-impl<T: ?Sized + Ord> Ord for Box<T> {
+impl<T: ?Sized + Ord, A: Alloc> Ord for Box<T, A> {
     #[inline]
-    fn cmp(&self, other: &Box<T>) -> Ordering {
+    fn cmp(&self, other: &Box<T, A>) -> Ordering {
         Ord::cmp(&**self, &**other)
     }
 }
 
+/// Just the contents are compared, the allocator is ignored
 #[stable(feature = "rust1", since = "1.0.0")]
-impl<T: ?Sized + Eq> Eq for Box<T> {}
+impl<T: ?Sized + Eq, A: Alloc> Eq for Box<T, A> {}
 
+/// Just the contents are compared, the allocator is ignored
 #[stable(feature = "rust1", since = "1.0.0")]
-impl<T: ?Sized + Hash> Hash for Box<T> {
+impl<T: ?Sized + Hash, A: Alloc> Hash for Box<T, A> {
     fn hash<H: Hasher>(&self, state: &mut H) {
         (**self).hash(state);
     }
 }
 
+/// Just the contents are compared, the allocator is ignored
 #[stable(feature = "indirect_hasher_impl", since = "1.22.0")]
-impl<T: ?Sized + Hasher> Hasher for Box<T> {
+impl<T: ?Sized + Hasher, A: Alloc> Hasher for Box<T, A> {
     fn finish(&self) -> u64 {
         (**self).finish()
     }
@@ -415,16 +501,19 @@ impl<T: ?Sized + Hasher, A: Alloc> Hasher for Box<T, A> {
 }
 #[stable(feature = "from_for_ptrs", since = "1.6.0")]
-impl<T> From<T> for Box<T> {
+impl<T, A: Alloc + AllocHelper<Err = !> + Default> From<T> for Box<T, A> {
     fn from(t: T) -> Self {
-        Box::new(t)
+        let Ok(b) = Box::new_in(t, Default::default());
+        b
     }
 }
 
 #[stable(feature = "box_from_slice", since = "1.17.0")]
-impl<'a, T: Copy> From<&'a [T]> for Box<[T]> {
-    fn from(slice: &'a [T]) -> Box<[T]> {
-        let mut boxed = unsafe { RawVec::with_capacity(slice.len()).into_box() };
+impl<'a, T: Copy, A: Alloc + AllocHelper<Err = !> + Default> From<&'a [T]> for Box<[T], A> {
+    fn from(slice: &'a [T]) -> Box<[T], A> {
+        let a = A::default();
+        let Ok(vec) = RawVec::with_capacity_in(slice.len(), a);
+        let mut boxed = unsafe { vec.into_box() };
         boxed.copy_from_slice(slice);
         boxed
     }
@@ -446,7 +535,7 @@ impl From<Box<str>> for Box<[u8]> {
     }
 }
 
-impl Box<dyn Any> {
+impl<A: Alloc> Box<dyn Any, A> {
     #[inline]
     #[stable(feature = "rust1", since = "1.0.0")]
     /// Attempt to downcast the box to a concrete type.
@@ -468,11 +557,11 @@ impl<A: Alloc> Box<dyn Any, A> {
     ///     print_if_string(Box::new(0i8));
     /// }
     /// ```
-    pub fn downcast<T: Any>(self) -> Result<Box<T>, Box<dyn Any>> {
+    pub fn downcast<T: Any>(self) -> Result<Box<T, A>, Box<dyn Any, A>> {
         if self.is::<T>() {
             unsafe {
-                let raw: *mut dyn Any = Box::into_raw(self);
-                Ok(Box::from_raw(raw as *mut T))
+                let (u, a) = Box::into_both(self);
+                Ok(Box::from_raw_in(u.as_ptr() as *mut T, a))
             }
         } else {
             Err(self)
@@ -480,7 +569,7 @@ impl<A: Alloc> Box<dyn Any, A> {
     }
 }
 
-impl Box<dyn Any + Send> {
+impl<A: Alloc> Box<dyn Any + Send, A> {
     #[inline]
     #[stable(feature = "rust1", since = "1.0.0")]
     /// Attempt to downcast the box to a concrete type.
@@ -502,30 +591,31 @@ impl<A: Alloc> Box<dyn Any + Send, A> {
     ///     print_if_string(Box::new(0i8));
     /// }
     /// ```
-    pub fn downcast<T: Any>(self) -> Result<Box<T>, Box<dyn Any + Send>> {
-        <Box<dyn Any>>::downcast(self).map_err(|s| unsafe {
+    pub fn downcast<T: Any>(self) -> Result<Box<T, A>, Box<dyn Any + Send, A>> {
+        <Box<dyn Any, A>>::downcast(self).map_err(|s| unsafe {
+            let (u, a) = Box::into_both(s);
             // reapply the Send marker
-            Box::from_raw(Box::into_raw(s) as *mut (dyn Any + Send))
+            Box::from_raw_in(u.as_ptr() as *mut (dyn Any + Send), a)
         })
     }
 }
 
 #[stable(feature = "rust1", since = "1.0.0")]
-impl<T: fmt::Display + ?Sized> fmt::Display for Box<T> {
+impl<T: fmt::Display + ?Sized, A: Alloc> fmt::Display for Box<T, A> {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
         fmt::Display::fmt(&**self, f)
     }
 }
 
 #[stable(feature = "rust1", since = "1.0.0")]
-impl<T: fmt::Debug + ?Sized> fmt::Debug for Box<T> {
+impl<T: fmt::Debug + ?Sized, A: Alloc> fmt::Debug for Box<T, A> {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
         fmt::Debug::fmt(&**self, f)
     }
 }
 
 #[stable(feature = "rust1", since = "1.0.0")]
-impl<T: ?Sized> fmt::Pointer for Box<T> {
+impl<T: ?Sized, A: Alloc> fmt::Pointer for Box<T, A> {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
         // It's not possible to extract the inner Uniq directly from the Box,
         // instead we cast it to a *const which aliases the Unique
@@ -535,7 +625,7 @@ impl<T: ?Sized, A: Alloc> fmt::Pointer for Box<T, A> {
 }
 
 #[stable(feature = "rust1", since = "1.0.0")]
-impl<T: ?Sized> Deref for Box<T> {
+impl<T: ?Sized, A: Alloc> Deref for Box<T, A> {
     type Target = T;
 
     fn deref(&self) -> &T {
@@ -544,14 +634,14 @@ impl<T: ?Sized, A: Alloc> Deref for Box<T, A> {
 }
 
 #[stable(feature = "rust1", since = "1.0.0")]
-impl<T: ?Sized> DerefMut for Box<T> {
+impl<T: ?Sized, A: Alloc> DerefMut for Box<T, A> {
     fn deref_mut(&mut self) -> &mut T {
         &mut **self
     }
 }
 
 #[stable(feature = "rust1", since = "1.0.0")]
-impl<I: Iterator + ?Sized> Iterator for Box<I> {
+impl<I: Iterator + ?Sized, A: Alloc> Iterator for Box<I, A> {
     type Item = I::Item;
     fn next(&mut self) -> Option<I::Item> {
         (**self).next()
@@ -564,13 +654,13 @@ impl<I: Iterator + ?Sized, A: Alloc> Iterator for Box<I, A> {
     }
 }
 #[stable(feature = "rust1", since = "1.0.0")]
-impl<I: DoubleEndedIterator + ?Sized> DoubleEndedIterator for Box<I> {
+impl<I: DoubleEndedIterator + ?Sized, A: Alloc> DoubleEndedIterator for Box<I, A> {
     fn next_back(&mut self) -> Option<I::Item> {
         (**self).next_back()
     }
 }
 #[stable(feature = "rust1", since = "1.0.0")]
-impl<I: ExactSizeIterator + ?Sized> ExactSizeIterator for Box<I> {
+impl<I: ExactSizeIterator + ?Sized, A: Alloc> ExactSizeIterator for Box<I, A> {
     fn len(&self) -> usize {
         (**self).len()
     }
@@ -580,7 +670,7 @@ impl<I: ExactSizeIterator + ?Sized, A: Alloc> ExactSizeIterator for Box<I, A> {
 }
 
 #[stable(feature = "fused", since = "1.26.0")]
-impl<I: FusedIterator + ?Sized> FusedIterator for Box<I> {}
+impl<I: FusedIterator + ?Sized, A: Alloc> FusedIterator for Box<I, A> {}
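The downcast rewrite above is what makes the following (hypothetical) usage sound with a non-default allocator: the reconstructed box carries the original allocator through the cast.

```rust
#![feature(allocator_api)]
use std::any::Any;
use std::alloc::Global;

fn unwrap_i32(b: Box<dyn Any, Global>) -> Option<i32> {
    // On success the value is freed by `Global`, exactly as it was allocated;
    // on failure the original box (and its allocator) comes back untouched.
    b.downcast::<i32>().ok().map(|n| *n)
}
```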
 /// `FnBox` is a version of the `FnOnce` intended for use with boxed
@@ -623,10 +713,10 @@ impl<I: FusedIterator + ?Sized, A: Alloc> FusedIterator for Box<I, A> {}
 #[rustc_paren_sugar]
 #[unstable(feature = "fnbox",
            reason = "will be deprecated if and when `Box<FnOnce>` becomes usable", issue = "28796")]
-pub trait FnBox<A> {
+pub trait FnBox<As, A: Alloc = AbortAdapter<Global>> {
     type Output;
 
-    fn call_box(self: Box<Self>, args: A) -> Self::Output;
+    fn call_box(self: Box<Self, A>, args: As) -> Self::Output;
 }
 
 #[unstable(feature = "fnbox",
@@ -662,13 +752,18 @@ impl<'a, A, R> FnOnce<A> for Box<dyn FnBox<A, Output = R> + Send + 'a> {
 }
 
 #[unstable(feature = "coerce_unsized", issue = "27732")]
-impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<Box<U>> for Box<T> {}
+impl<T: ?Sized + Unsize<U>, U: ?Sized, A: Alloc> CoerceUnsized<Box<U, A>> for Box<T, A> {}
 
 #[stable(feature = "box_slice_clone", since = "1.3.0")]
-impl<T: Clone> Clone for Box<[T]> {
+impl<T: Clone, A: Alloc + Clone> Clone for Box<[T], A> {
     fn clone(&self) -> Self {
+        let cloned_buf = match RawVec::with_capacity_in(self.len(), self.1.clone()) {
+            Ok(b) => b,
+            Err(_) => panic!(), // handle_alloc_error(_);
+        };
+
         let mut new = BoxBuilder {
-            data: RawVec::with_capacity(self.len()),
+            data: cloned_buf,
             len: 0,
         };
 
@@ -686,20 +781,20 @@ impl<T: Clone, A: Alloc + Clone> Clone for Box<[T], A> {
         return unsafe { new.into_box() };
 
         // Helper type for responding to panics correctly.
-        struct BoxBuilder<T> {
-            data: RawVec<T>,
+        struct BoxBuilder<T, A: Alloc> {
+            data: RawVec<T, A>,
             len: usize,
         }
 
-        impl<T> BoxBuilder<T> {
-            unsafe fn into_box(self) -> Box<[T]> {
+        impl<T, A: Alloc> BoxBuilder<T, A> {
+            unsafe fn into_box(self) -> Box<[T], A> {
                 let raw = ptr::read(&self.data);
                 mem::forget(self);
                 raw.into_box()
             }
         }
 
-        impl<T> Drop for BoxBuilder<T> {
+        impl<T, A: Alloc> Drop for BoxBuilder<T, A> {
             fn drop(&mut self) {
                 let mut data = self.data.ptr();
                 let max = unsafe { data.offset(self.len as isize) };
@@ -716,35 +811,35 @@ impl<T: Clone, A: Alloc + Clone> Clone for Box<[T], A> {
 }
 
 #[stable(feature = "box_borrow", since = "1.1.0")]
-impl<T: ?Sized> borrow::Borrow<T> for Box<T> {
+impl<T: ?Sized, A: Alloc> borrow::Borrow<T> for Box<T, A> {
     fn borrow(&self) -> &T {
         &**self
     }
 }
 
 #[stable(feature = "box_borrow", since = "1.1.0")]
-impl<T: ?Sized> borrow::BorrowMut<T> for Box<T> {
+impl<T: ?Sized, A: Alloc> borrow::BorrowMut<T> for Box<T, A> {
     fn borrow_mut(&mut self) -> &mut T {
         &mut **self
     }
 }
 
 #[stable(since = "1.5.0", feature = "smart_ptr_as_ref")]
-impl<T: ?Sized> AsRef<T> for Box<T> {
+impl<T: ?Sized, A: Alloc> AsRef<T> for Box<T, A> {
     fn as_ref(&self) -> &T {
         &**self
     }
 }
 
 #[stable(since = "1.5.0", feature = "smart_ptr_as_ref")]
-impl<T: ?Sized> AsMut<T> for Box<T> {
+impl<T: ?Sized, A: Alloc> AsMut<T> for Box<T, A> {
     fn as_mut(&mut self) -> &mut T {
         &mut **self
     }
 }
 
 #[unstable(feature = "generator_trait", issue = "43122")]
-impl<T> Generator for Box<T>
+impl<T, A: Alloc> Generator for Box<T, A>
     where T: Generator + ?Sized
 {
     type Yield = T::Yield;
diff --git a/src/liballoc/collections/linked_list.rs b/src/liballoc/collections/linked_list.rs
index 9844de9a57d70..15342d179c14f 100644
--- a/src/liballoc/collections/linked_list.rs
+++ b/src/liballoc/collections/linked_list.rs
@@ -30,6 +30,8 @@ use core::marker::PhantomData;
 use core::mem;
 use core::ptr::NonNull;
 
+use abort_adapter::AbortAdapter;
+use alloc::{Global, Alloc};
 use boxed::Box;
 use super::SpecExtend;
 
@@ -42,17 +44,19 @@ use super::SpecExtend;
 /// `LinkedList`. In general, array-based containers are faster,
 /// more memory efficient and make better use of CPU cache.
 #[stable(feature = "rust1", since = "1.0.0")]
-pub struct LinkedList<T> {
-    head: Option<NonNull<Node<T>>>,
-    tail: Option<NonNull<Node<T>>>,
+pub struct LinkedList<T, A: Alloc = AbortAdapter<Global>> {
+    head: Option<NonNull<Node<T, A>>>,
+    tail: Option<NonNull<Node<T, A>>>,
     len: usize,
-    marker: PhantomData<Box<Node<T>>>,
+    alloc: A,
+    marker: PhantomData<Box<Node<T, A>, A>>,
 }
 
-struct Node<T> {
-    next: Option<NonNull<Node<T>>>,
-    prev: Option<NonNull<Node<T>>>,
+struct Node<T, A: Alloc> {
+    next: Option<NonNull<Node<T, A>>>,
+    prev: Option<NonNull<Node<T, A>>>,
     element: T,
+    marker: PhantomData<Box<Node<T, A>, A>>,
 }
 
 /// An iterator over the elements of a `LinkedList`.
@@ -63,15 +67,16 @@ struct Node<T, A: Alloc> {
 /// [`iter`]: struct.LinkedList.html#method.iter
 /// [`LinkedList`]: struct.LinkedList.html
 #[stable(feature = "rust1", since = "1.0.0")]
-pub struct Iter<'a, T: 'a> {
-    head: Option<NonNull<Node<T>>>,
-    tail: Option<NonNull<Node<T>>>,
+
+pub struct Iter<'a, T: 'a, A: 'a + Alloc = AbortAdapter<Global>> {
+    head: Option<NonNull<Node<T, A>>>,
+    tail: Option<NonNull<Node<T, A>>>,
     len: usize,
-    marker: PhantomData<&'a Node<T>>,
+    marker: PhantomData<&'a Node<T, A>>,
 }
 
 #[stable(feature = "collection_debug", since = "1.17.0")]
-impl<'a, T: 'a + fmt::Debug> fmt::Debug for Iter<'a, T> {
+impl<'a, T: 'a + fmt::Debug, A: 'a + Alloc> fmt::Debug for Iter<'a, T, A> {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
         f.debug_tuple("Iter")
          .field(&self.len)
@@ -81,7 +86,7 @@ impl<'a, T: 'a + fmt::Debug, A: 'a + Alloc> fmt::Debug for Iter<'a, T, A> {
 
 // FIXME(#26925) Remove in favor of `#[derive(Clone)]`
 #[stable(feature = "rust1", since = "1.0.0")]
-impl<'a, T> Clone for Iter<'a, T> {
+impl<'a, T, A: Alloc> Clone for Iter<'a, T, A> {
     fn clone(&self) -> Self {
         Iter { ..*self }
     }
@@ -95,10 +100,10 @@ impl<'a, T, A: Alloc> Clone for Iter<'a, T, A> {
 /// [`iter_mut`]: struct.LinkedList.html#method.iter_mut
 /// [`LinkedList`]: struct.LinkedList.html
 #[stable(feature = "rust1", since = "1.0.0")]
-pub struct IterMut<'a, T: 'a> {
-    list: &'a mut LinkedList<T>,
-    head: Option<NonNull<Node<T>>>,
-    tail: Option<NonNull<Node<T>>>,
+pub struct IterMut<'a, T: 'a, A: Alloc + Clone + 'a = AbortAdapter<Global>> {
+    list: &'a mut LinkedList<T, A>,
+    head: Option<NonNull<Node<T, A>>>,
+    tail: Option<NonNull<Node<T, A>>>,
     len: usize,
 }
 
@@ -119,10 +124,16 @@ impl<'a, T: 'a + fmt::Debug> fmt::Debug for IterMut<'a, T> {
 ///
 /// [`into_iter`]: struct.LinkedList.html#method.into_iter
 /// [`LinkedList`]: struct.LinkedList.html
-#[derive(Clone)]
 #[stable(feature = "rust1", since = "1.0.0")]
-pub struct IntoIter<T> {
-    list: LinkedList<T>,
+pub struct IntoIter<T, A: Alloc + Clone = AbortAdapter<Global>> {
+    list: LinkedList<T, A>,
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: Clone> Clone for IntoIter<T> {
+    fn clone(&self) -> Self {
+        IntoIter { list: self.list.clone() }
+    }
 }
 
 #[stable(feature = "collection_debug", since = "1.17.0")]
@@ -134,25 +145,26 @@ impl<T: fmt::Debug> fmt::Debug for IntoIter<T> {
     }
 }
 
-impl<T> Node<T> {
+impl<T, A: Alloc> Node<T, A> {
     fn new(element: T) -> Self {
         Node {
             next: None,
             prev: None,
             element,
+            marker: PhantomData
         }
     }
 
-    fn into_element(self: Box<Self>) -> T {
+    fn into_element(self: Box<Self, A>) -> T {
         self.element
     }
 }
 
 // private methods
-impl<T> LinkedList<T> {
+impl<T, A: Alloc + Clone> LinkedList<T, A> {
     /// Adds the given node to the front of the list.
     #[inline]
-    fn push_front_node(&mut self, mut node: Box<Node<T>>) {
+    fn push_front_node(&mut self, mut node: Box<Node<T, A>, A>) {
         unsafe {
             node.next = self.head;
             node.prev = None;
@@ -170,9 +182,9 @@ impl<T, A: Alloc + Clone> LinkedList<T, A> {
 
     /// Removes and returns the node at the front of the list.
     #[inline]
-    fn pop_front_node(&mut self) -> Option<Box<Node<T>>> {
+    fn pop_front_node(&mut self) -> Option<Box<Node<T, A>, A>> {
         self.head.map(|node| unsafe {
-            let node = Box::from_raw(node.as_ptr());
+            let node = Box::from_raw_in(node.as_ptr(), self.alloc.clone());
             self.head = node.next;
 
             match self.head {
@@ -187,7 +199,7 @@ impl<T, A: Alloc + Clone> LinkedList<T, A> {
 
     /// Adds the given node to the back of the list.
     #[inline]
-    fn push_back_node(&mut self, mut node: Box<Node<T>>) {
+    fn push_back_node(&mut self, mut node: Box<Node<T, A>, A>) {
         unsafe {
             node.next = None;
             node.prev = self.tail;
@@ -205,9 +217,9 @@ impl<T, A: Alloc + Clone> LinkedList<T, A> {
 
     /// Removes and returns the node at the back of the list.
     #[inline]
-    fn pop_back_node(&mut self) -> Option<Box<Node<T>>> {
+    fn pop_back_node(&mut self) -> Option<Box<Node<T, A>, A>> {
         self.tail.map(|node| unsafe {
-            let node = Box::from_raw(node.as_ptr());
+            let node = Box::from_raw_in(node.as_ptr(), self.alloc.clone());
             self.tail = node.prev;
 
             match self.tail {
@@ -224,7 +236,7 @@ impl<T, A: Alloc + Clone> LinkedList<T, A> {
     ///
     /// Warning: this will not check that the provided node belongs to the current list.
     #[inline]
-    unsafe fn unlink_node(&mut self, mut node: NonNull<Node<T>>) {
+    unsafe fn unlink_node(&mut self, mut node: NonNull<Node<T, A>>) {
         let node = node.as_mut();
 
         match node.prev {
@@ -265,10 +277,58 @@ impl<T> LinkedList<T> {
     #[inline]
     #[stable(feature = "rust1", since = "1.0.0")]
     pub fn new() -> Self {
+        Self::new_in(Default::default())
+    }
+
+    /// Appends an element to the back of a list
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::collections::LinkedList;
+    ///
+    /// let mut d = LinkedList::new();
+    /// d.push_back(1);
+    /// d.push_back(3);
+    /// assert_eq!(3, *d.back().unwrap());
+    /// ```
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn push_back(&mut self, elt: T) {
+        let Ok(()) = self.push_back_alloc(elt);
+    }
+
+    /// Adds an element first in the list.
+    ///
+    /// This operation should compute in O(1) time.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::collections::LinkedList;
+    ///
+    /// let mut dl = LinkedList::new();
+    ///
+    /// dl.push_front(2);
+    /// assert_eq!(dl.front().unwrap(), &2);
+    ///
+    /// dl.push_front(1);
+    /// assert_eq!(dl.front().unwrap(), &1);
+    /// ```
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn push_front(&mut self, elt: T) {
+        let Ok(()) = self.push_front_alloc(elt);
+    }
+}
+
+impl<T, A: Alloc + Clone> LinkedList<T, A> {
+    #[inline]
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn new_in(a: A) -> Self {
         LinkedList {
             head: None,
             tail: None,
             len: 0,
+            alloc: a,
             marker: PhantomData,
         }
     }
@@ -341,7 +401,7 @@ impl<T, A: Alloc + Clone> LinkedList<T, A> {
     /// ```
     #[inline]
     #[stable(feature = "rust1", since = "1.0.0")]
-    pub fn iter(&self) -> Iter<T> {
+    pub fn iter(&self) -> Iter<T, A> {
         Iter {
             head: self.head,
             tail: self.tail,
@@ -375,7 +435,7 @@ impl<T, A: Alloc + Clone> LinkedList<T, A> {
     /// ```
     #[inline]
     #[stable(feature = "rust1", since = "1.0.0")]
-    pub fn iter_mut(&mut self) -> IterMut<T> {
+    pub fn iter_mut(&mut self) -> IterMut<T, A> {
         IterMut {
             head: self.head,
             tail: self.tail,
@@ -454,7 +514,7 @@ impl<T, A: Alloc + Clone> LinkedList<T, A> {
     #[inline]
     #[stable(feature = "rust1", since = "1.0.0")]
     pub fn clear(&mut self) {
-        *self = Self::new();
+        *self = Self::new_in(self.alloc.clone());
     }
 
     /// Returns `true` if the `LinkedList` contains an element equal to the
@@ -599,8 +659,10 @@ impl<T, A: Alloc + Clone> LinkedList<T, A> {
     /// assert_eq!(dl.front().unwrap(), &1);
     /// ```
     #[stable(feature = "rust1", since = "1.0.0")]
-    pub fn push_front(&mut self, elt: T) {
-        self.push_front_node(box Node::new(elt));
+    pub fn push_front_alloc(&mut self, elt: T) -> Result<(), A::Err> {
+        let alloc = self.alloc.clone();
+        self.push_front_node(Box::new_in(Node::new(elt), alloc)?);
+        Ok(())
     }
 
     /// Removes the first element and returns it, or `None` if the list is
@@ -627,21 +689,11 @@ impl<T, A: Alloc + Clone> LinkedList<T, A> {
         self.pop_front_node().map(Node::into_element)
     }
 
-    /// Appends an element to the back of a list
-    ///
-    /// # Examples
-    ///
-    /// ```
-    /// use std::collections::LinkedList;
-    ///
-    /// let mut d = LinkedList::new();
-    /// d.push_back(1);
-    /// d.push_back(3);
-    /// assert_eq!(3, *d.back().unwrap());
-    /// ```
     #[stable(feature = "rust1", since = "1.0.0")]
-    pub fn push_back(&mut self, elt: T) {
-        self.push_back_node(box Node::new(elt));
+    pub fn push_back_alloc(&mut self, elt: T) -> Result<(), A::Err> {
+        let alloc = self.alloc.clone();
+        self.push_back_node(Box::new_in(Node::new(elt), alloc)?);
+        Ok(())
     }
 
     /// Removes the last element from a list and returns it, or `None` if
@@ -689,13 +741,14 @@ impl<T, A: Alloc + Clone> LinkedList<T, A> {
     /// assert_eq!(splitted.pop_front(), None);
     /// ```
     #[stable(feature = "rust1", since = "1.0.0")]
-    pub fn split_off(&mut self, at: usize) -> LinkedList<T> {
+    pub fn split_off(&mut self, at: usize) -> Self {
         let len = self.len();
         assert!(at <= len, "Cannot split off at a nonexistent index");
         if at == 0 {
-            return mem::replace(self, Self::new());
+            let alloc = self.alloc.clone();
+            return mem::replace(self, Self::new_in(alloc));
         } else if at == len {
-            return Self::new();
+            return Self::new_in(self.alloc.clone());
         }
 
         // Below, we iterate towards the `i-1`th node, either from the start or the end,
@@ -733,6 +786,7 @@ impl<T, A: Alloc + Clone> LinkedList<T, A> {
             head: second_part_head,
             tail: self.tail,
             len: len - at,
+            alloc: self.alloc.clone(),
             marker: PhantomData,
         };
 
@@ -770,7 +824,7 @@ impl<T, A: Alloc + Clone> LinkedList<T, A> {
     /// assert_eq!(odds.into_iter().collect::<Vec<_>>(), vec![1, 3, 5, 9, 11, 13, 15]);
     /// ```
    #[unstable(feature = "drain_filter", reason = "recently added", issue = "43244")]
-    pub fn drain_filter<F>(&mut self, filter: F) -> DrainFilter<T, F>
+    pub fn drain_filter<F>(&mut self, filter: F) -> DrainFilter<T, F, A>
         where F: FnMut(&mut T) -> bool
     {
         // avoid borrow issues.
@@ -788,14 +842,14 @@ impl<T, A: Alloc + Clone> LinkedList<T, A> {
 }
 
 #[stable(feature = "rust1", since = "1.0.0")]
-unsafe impl<#[may_dangle] T> Drop for LinkedList<T> {
+unsafe impl<#[may_dangle] T, A: Alloc + Clone> Drop for LinkedList<T, A> {
     fn drop(&mut self) {
         while let Some(_) = self.pop_front_node() {}
     }
 }
 
 #[stable(feature = "rust1", since = "1.0.0")]
-impl<'a, T> Iterator for Iter<'a, T> {
+impl<'a, T, A: Alloc> Iterator for Iter<'a, T, A> {
     type Item = &'a T;
 
     #[inline]
@@ -820,7 +874,7 @@ impl<'a, T, A: Alloc> Iterator for Iter<'a, T, A> {
 }
 
 #[stable(feature = "rust1", since = "1.0.0")]
-impl<'a, T> DoubleEndedIterator for Iter<'a, T> {
+impl<'a, T, A: Alloc> DoubleEndedIterator for Iter<'a, T, A> {
     #[inline]
     fn next_back(&mut self) -> Option<&'a T> {
         if self.len == 0 {
@@ -838,13 +892,13 @@ impl<'a, T, A: Alloc> DoubleEndedIterator for Iter<'a, T, A> {
 }
 
 #[stable(feature = "rust1", since = "1.0.0")]
-impl<'a, T> ExactSizeIterator for Iter<'a, T> {}
+impl<'a, T, A: Alloc> ExactSizeIterator for Iter<'a, T, A> {}
 
 #[stable(feature = "fused", since = "1.26.0")]
-impl<'a, T> FusedIterator for Iter<'a, T> {}
+impl<'a, T, A: Alloc> FusedIterator for Iter<'a, T, A> {}
 
 #[stable(feature = "rust1", since = "1.0.0")]
-impl<'a, T> Iterator for IterMut<'a, T> {
+impl<'a, T, A: Alloc + Clone> Iterator for IterMut<'a, T, A> {
     type Item = &'a mut T;
 
     #[inline]
@@ -869,7 +923,7 @@ impl<'a, T, A: Alloc + Clone> Iterator for IterMut<'a, T, A> {
 }
 
 #[stable(feature = "rust1", since = "1.0.0")]
-impl<'a, T> DoubleEndedIterator for IterMut<'a, T> {
+impl<'a, T, A: Alloc + Clone> DoubleEndedIterator for IterMut<'a, T, A> {
     #[inline]
     fn next_back(&mut self) -> Option<&'a mut T> {
         if self.len == 0 {
@@ -887,10 +941,10 @@ impl<'a, T, A: Alloc + Clone> DoubleEndedIterator for IterMut<'a, T, A> {
 }
 
 #[stable(feature = "rust1", since = "1.0.0")]
-impl<'a, T> ExactSizeIterator for IterMut<'a, T> {}
+impl<'a, T, A: Alloc + Clone> ExactSizeIterator for IterMut<'a, T, A> {}
 
 #[stable(feature = "fused", since = "1.26.0")]
-impl<'a, T> FusedIterator for IterMut<'a, T> {}
+impl<'a, T, A: Alloc + Clone> FusedIterator for IterMut<'a, T, A> {}
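Taken together, a hedged sketch of the new `LinkedList` surface: `new_in` picks the allocator, and the `_alloc` variants return `Result<(), A::Err>` rather than aborting on OOM.

```rust
#![feature(allocator_api)]
use std::alloc::{AllocErr, Global};
use std::collections::LinkedList;

fn build() -> Result<LinkedList<i32, Global>, AllocErr> {
    let mut list = LinkedList::new_in(Global);
    list.push_back_alloc(2)?;  // OOM becomes an `Err`, not an abort
    list.push_front_alloc(1)?;
    Ok(list)
}
```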
 impl<'a, T> IterMut<'a, T> {
     /// Inserts the given element just after the element most recently returned by `.next()`.
@@ -921,26 +975,35 @@ impl<'a, T> IterMut<'a, T> {
                reason = "this is probably better handled by a cursor type -- we'll see",
                issue = "27794")]
     pub fn insert_next(&mut self, element: T) {
-        match self.head {
-            None => self.list.push_back(element),
+        let Ok(()) = self.insert_next_alloc(element);
+    }
+}
+
+impl<'a, T, A: Alloc + Clone> IterMut<'a, T, A> {
+    #[inline]
+    #[unstable(feature = "allocator_api", issue = "32838")]
+    pub fn insert_next_alloc(&mut self, element: T) -> Result<(), A::Err> {
+        Ok(match self.head {
+            None => self.list.push_back_alloc(element)?,
             Some(mut head) => unsafe {
                 let mut prev = match head.as_ref().prev {
-                    None => return self.list.push_front(element),
+                    None => return self.list.push_front_alloc(element),
                     Some(prev) => prev,
                 };
 
-                let node = Some(Box::into_raw_non_null(box Node {
+                let node = Some(NonNull::from(Box::into_unique(Box::new_in(Node {
                     next: Some(head),
                     prev: Some(prev),
                     element,
-                })));
+                    marker: PhantomData,
+                }, self.list.alloc.clone())?)));
 
                 prev.as_mut().next = node;
                 head.as_mut().prev = node;
 
                 self.list.len += 1;
             },
-        }
+        })
     }
 
     /// Provides a reference to the next element, without changing the iterator.
@@ -977,19 +1040,20 @@ impl<'a, T, A: Alloc + Clone> IterMut<'a, T, A> {
 
 /// An iterator produced by calling `drain_filter` on LinkedList.
 #[unstable(feature = "drain_filter", reason = "recently added", issue = "43244")]
-pub struct DrainFilter<'a, T: 'a, F: 'a>
-    where F: FnMut(&mut T) -> bool,
+pub struct DrainFilter<'a, T: 'a, F: 'a, A: 'a = AbortAdapter<Global>>
    where A: Alloc + Clone, F: FnMut(&mut T) -> bool,
 {
-    list: &'a mut LinkedList<T>,
-    it: Option<NonNull<Node<T>>>,
+    list: &'a mut LinkedList<T, A>,
+    it: Option<NonNull<Node<T, A>>>,
     pred: F,
     idx: usize,
     old_len: usize,
 }
 
 #[unstable(feature = "drain_filter", reason = "recently added", issue = "43244")]
-impl<'a, T, F> Iterator for DrainFilter<'a, T, F>
-    where F: FnMut(&mut T) -> bool,
+impl<'a, T, F, A> Iterator for DrainFilter<'a, T, F, A>
+    where T: 'a, A: 'a + Alloc + Clone,
+          F: FnMut(&mut T) -> bool,
 {
     type Item = T;
 
@@ -1015,7 +1079,7 @@ impl<'a, T, F, A> Iterator for DrainFilter<'a, T, F, A>
 }
 
 #[unstable(feature = "drain_filter", reason = "recently added", issue = "43244")]
-impl<'a, T, F> Drop for DrainFilter<'a, T, F>
+impl<'a, T, F, A: Alloc + Clone> Drop for DrainFilter<'a, T, F, A>
     where F: FnMut(&mut T) -> bool,
 {
     fn drop(&mut self) {
@@ -1024,8 +1088,9 @@ impl<'a, T, F, A: Alloc + Clone> Drop for DrainFilter<'a, T, F, A>
 }
 
 #[unstable(feature = "drain_filter", reason = "recently added", issue = "43244")]
-impl<'a, T: 'a + fmt::Debug, F> fmt::Debug for DrainFilter<'a, T, F>
-    where F: FnMut(&mut T) -> bool
+impl<'a, T, A, F> fmt::Debug for DrainFilter<'a, T, F, A>
+    where T: 'a + fmt::Debug, A: 'a + Alloc + Clone,
+          F: FnMut(&mut T) -> bool
 {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
         f.debug_tuple("DrainFilter")
@@ -1035,7 +1100,7 @@ impl<'a, T, A, F> fmt::Debug for DrainFilter<'a, T, F, A>
 }
 
 #[stable(feature = "rust1", since = "1.0.0")]
-impl<T> Iterator for IntoIter<T> {
+impl<T, A: Alloc + Clone> Iterator for IntoIter<T, A> {
     type Item = T;
 
     #[inline]
@@ -1050,7 +1115,7 @@ impl<T, A: Alloc + Clone> Iterator for IntoIter<T, A> {
 }
 
 #[stable(feature = "rust1", since = "1.0.0")]
-impl<T> DoubleEndedIterator for IntoIter<T> {
+impl<T, A: Alloc + Clone> DoubleEndedIterator for IntoIter<T, A> {
     #[inline]
     fn next_back(&mut self) -> Option<T> {
         self.list.pop_back()
@@ -1058,10 +1123,10 @@ impl<T, A: Alloc + Clone> DoubleEndedIterator for IntoIter<T, A> {
 }
 
 #[stable(feature = "rust1", since = "1.0.0")]
-impl<T> ExactSizeIterator for IntoIter<T> {}
+impl<T, A: Alloc + Clone> ExactSizeIterator for IntoIter<T, A> {}
 
 #[stable(feature = "fused", since = "1.26.0")]
-impl<T> FusedIterator for IntoIter<T> {}
+impl<T, A: Alloc + Clone> FusedIterator for IntoIter<T, A> {}
 
 #[stable(feature = "rust1", since = "1.0.0")]
 impl<T> FromIterator<T> for LinkedList<T> {
@@ -1073,67 +1138,67 @@ impl<T> FromIterator<T> for LinkedList<T> {
 }
 
 #[stable(feature = "rust1", since = "1.0.0")]
-impl<T> IntoIterator for LinkedList<T> {
+impl<T, A: Alloc + Clone> IntoIterator for LinkedList<T, A> {
     type Item = T;
-    type IntoIter = IntoIter<T>;
+    type IntoIter = IntoIter<T, A>;
 
     /// Consumes the list into an iterator yielding elements by value.
     #[inline]
-    fn into_iter(self) -> IntoIter<T> {
+    fn into_iter(self) -> IntoIter<T, A> {
         IntoIter { list: self }
     }
 }
 
 #[stable(feature = "rust1", since = "1.0.0")]
-impl<'a, T> IntoIterator for &'a LinkedList<T> {
+impl<'a, T, A: Alloc + Clone> IntoIterator for &'a LinkedList<T, A> {
     type Item = &'a T;
-    type IntoIter = Iter<'a, T>;
+    type IntoIter = Iter<'a, T, A>;
 
-    fn into_iter(self) -> Iter<'a, T> {
+    fn into_iter(self) -> Iter<'a, T, A> {
         self.iter()
     }
 }
 
 #[stable(feature = "rust1", since = "1.0.0")]
-impl<'a, T> IntoIterator for &'a mut LinkedList<T> {
+impl<'a, T, A: Alloc + Clone> IntoIterator for &'a mut LinkedList<T, A> {
     type Item = &'a mut T;
-    type IntoIter = IterMut<'a, T>;
+    type IntoIter = IterMut<'a, T, A>;
 
-    fn into_iter(self) -> IterMut<'a, T> {
+    fn into_iter(self) -> IterMut<'a, T, A> {
         self.iter_mut()
     }
 }
 
 #[stable(feature = "rust1", since = "1.0.0")]
-impl<T> Extend<T> for LinkedList<T> {
+impl<T, A: Alloc + AllocHelper<Err = !> + Clone> Extend<T> for LinkedList<T, A> {
     fn extend<I: IntoIterator<Item = T>>(&mut self, iter: I) {
         <Self as SpecExtend<I>>::spec_extend(self, iter);
     }
 }
 
-impl<I: IntoIterator> SpecExtend<I> for LinkedList<I::Item> {
+impl<I: IntoIterator, A: Alloc + AllocHelper<Err = !> + Clone> SpecExtend<I> for LinkedList<I::Item, A> {
     default fn spec_extend(&mut self, iter: I) {
         for elt in iter {
-            self.push_back(elt);
+            let Ok(()) = self.push_back_alloc(elt);
         }
     }
 }
 
-impl<T> SpecExtend<LinkedList<T>> for LinkedList<T> {
-    fn spec_extend(&mut self, ref mut other: LinkedList<T>) {
+impl<T, A: Alloc + AllocHelper<Err = !> + Clone> SpecExtend<LinkedList<T, A>> for LinkedList<T, A> {
+    fn spec_extend(&mut self, ref mut other: LinkedList<T, A>) {
         self.append(other);
     }
 }
 
 #[stable(feature = "extend_ref", since = "1.2.0")]
-impl<'a, T: 'a + Copy> Extend<&'a T> for LinkedList<T> {
+impl<'a, T: 'a + Copy, A: Alloc + Clone> Extend<&'a T> for LinkedList<T, A> {
     fn extend<I: IntoIterator<Item = &'a T>>(&mut self, iter: I) {
         self.extend(iter.into_iter().cloned());
     }
 }
 
 #[stable(feature = "rust1", since = "1.0.0")]
-impl<T: PartialEq> PartialEq for LinkedList<T> {
+impl<T: PartialEq, A: Alloc + Clone> PartialEq for LinkedList<T, A> {
     fn eq(&self, other: &Self) -> bool {
         self.len() == other.len() && self.iter().eq(other)
     }
@@ -1144,17 +1209,17 @@ impl<T: PartialEq, A: Alloc + Clone> PartialEq for LinkedList<T, A> {
 }
 
 #[stable(feature = "rust1", since = "1.0.0")]
-impl<T: Eq> Eq for LinkedList<T> {}
+impl<T: Eq, A: Alloc + Clone> Eq for LinkedList<T, A> {}
 
 #[stable(feature = "rust1", since = "1.0.0")]
-impl<T: PartialOrd> PartialOrd for LinkedList<T> {
+impl<T: PartialOrd, A: Alloc + Clone> PartialOrd for LinkedList<T, A> {
     fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
         self.iter().partial_cmp(other)
     }
 }
 
 #[stable(feature = "rust1", since = "1.0.0")]
-impl<T: Ord> Ord for LinkedList<T> {
+impl<T: Ord, A: Alloc + Clone> Ord for LinkedList<T, A> {
     #[inline]
     fn cmp(&self, other: &Self) -> Ordering {
         self.iter().cmp(other)
@@ -1169,14 +1234,14 @@ impl<T: Clone> Clone for LinkedList<T> {
 }
 
 #[stable(feature = "rust1", since = "1.0.0")]
-impl<T: fmt::Debug> fmt::Debug for LinkedList<T> {
+impl<T: fmt::Debug, A: Alloc + Clone> fmt::Debug for LinkedList<T, A> {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
         f.debug_list().entries(self).finish()
     }
 }
 
 #[stable(feature = "rust1", since = "1.0.0")]
-impl<T: Hash> Hash for LinkedList<T> {
+impl<T: Hash, A: Alloc + Clone> Hash for LinkedList<T, A> {
     fn hash<H: Hasher>(&self, state: &mut H) {
         self.len().hash(state);
         for elt in self {
@@ -1200,10 +1265,10 @@ fn assert_covariance() {
 }
 
 #[stable(feature = "rust1", since = "1.0.0")]
-unsafe impl<T: Send> Send for LinkedList<T> {}
+unsafe impl<T: Send, A: Alloc + Clone + Send> Send for LinkedList<T, A> {}
 
 #[stable(feature = "rust1", since = "1.0.0")]
-unsafe impl<T: Sync> Sync for LinkedList<T> {}
+unsafe impl<T: Sync, A: Alloc + Clone + Sync> Sync for LinkedList<T, A> {}
 
 #[stable(feature = "rust1", since = "1.0.0")]
 unsafe impl<'a, T: Sync> Send for Iter<'a, T> {}
@@ -1226,16 +1291,18 @@ mod tests {
 
     use super::{LinkedList, Node};
 
+    type GlobalNode<T> = Node<T, AbortAdapter<Global>>;
+
     #[cfg(test)]
-    fn list_from<T: Clone>(v: &[T]) -> LinkedList<T> {
+    fn list_from<T: Clone>(v: &[T]) -> LinkedList<T, AbortAdapter<Global>> {
         v.iter().cloned().collect()
     }
 
-    pub fn check_links<T>(list: &LinkedList<T>) {
+    pub fn check_links<T>(list: &LinkedList<T, AbortAdapter<Global>>) {
         unsafe {
             let mut len = 0;
-            let mut last_ptr: Option<&Node<T>> = None;
-            let mut node_ptr: &Node<T>;
+            let mut last_ptr: Option<&GlobalNode<T>> = None;
+            let mut node_ptr: &GlobalNode<T>;
             match list.head {
                 None => {
                     // tail node should also be None.
@@ -1250,7 +1317,7 @@ mod tests {
                 (None, None) => {}
                 (None, _) => panic!("prev link for head"),
                 (Some(p), Some(pptr)) => {
-                    assert_eq!(p as *const Node<T>, pptr.as_ptr() as *const Node<T>);
+                    assert_eq!(p as *const GlobalNode<T>, pptr.as_ptr() as *const GlobalNode<T>);
                 }
                 _ => panic!("prev link is none, not good"),
             }
@@ -1269,7 +1336,7 @@ mod tests {
 
             // verify that the tail node points to the last node.
             let tail = list.tail.as_ref().expect("some tail node").as_ref();
-            assert_eq!(tail as *const Node<T>, node_ptr as *const Node<T>);
+            assert_eq!(tail as *const GlobalNode<T>, node_ptr as *const GlobalNode<T>);
             // check that len matches interior links.
             assert_eq!(len, list.len);
         }
diff --git a/src/liballoc/collections/mod.rs b/src/liballoc/collections/mod.rs
index 96e0eb633b2f5..f633e493154d8 100644
--- a/src/liballoc/collections/mod.rs
+++ b/src/liballoc/collections/mod.rs
@@ -56,24 +56,24 @@ use alloc::{AllocErr, LayoutErr};
 /// Augments `AllocErr` with a CapacityOverflow variant.
 #[derive(Clone, PartialEq, Eq, Debug)]
 #[unstable(feature = "try_reserve", reason = "new API", issue="48043")]
-pub enum CollectionAllocErr {
+pub enum CollectionAllocErr<E = AllocErr> {
     /// Error due to the computed capacity exceeding the collection's maximum
     /// (usually `isize::MAX` bytes).
     CapacityOverflow,
     /// Error due to the allocator (see the `AllocErr` type's docs).
-    AllocErr,
+    AllocErr(E),
 }
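Since the variant now carries a payload, downstream `match`es change shape. A sketch of the new pattern, generic over the allocator's error type:

```rust
#![feature(try_reserve)]
use std::collections::CollectionAllocErr;
use std::fmt::Debug;

fn describe<E: Debug>(err: CollectionAllocErr<E>) -> String {
    match err {
        CollectionAllocErr::CapacityOverflow => "capacity overflow".to_string(),
        // The allocator's own error is now available to the caller.
        CollectionAllocErr::AllocErr(e) => format!("allocation failed: {:?}", e),
    }
}
```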
 #[unstable(feature = "try_reserve", reason = "new API", issue="48043")]
-impl From<AllocErr> for CollectionAllocErr {
+impl From<AllocErr> for CollectionAllocErr<AllocErr> {
     #[inline]
     fn from(AllocErr: AllocErr) -> Self {
-        CollectionAllocErr::AllocErr
+        CollectionAllocErr::AllocErr(AllocErr)
     }
 }
 
 #[unstable(feature = "try_reserve", reason = "new API", issue="48043")]
-impl From<LayoutErr> for CollectionAllocErr {
+impl<E> From<LayoutErr> for CollectionAllocErr<E> {
     #[inline]
     fn from(_: LayoutErr) -> Self {
         CollectionAllocErr::CapacityOverflow
     }
 }
diff --git a/src/liballoc/collections/vec_deque.rs b/src/liballoc/collections/vec_deque.rs
index ba92b886138c0..fb60359aa9b60 100644
--- a/src/liballoc/collections/vec_deque.rs
+++ b/src/liballoc/collections/vec_deque.rs
@@ -30,6 +30,7 @@ use core::slice;
 use core::hash::{Hash, Hasher};
 use core::cmp;
 
+use alloc::AllocErr;
 use collections::CollectionAllocErr;
 use raw_vec::RawVec;
 use vec::Vec;
@@ -558,7 +559,7 @@ impl<T> VecDeque<T> {
             .expect("capacity overflow");
 
         if new_cap > old_cap {
-            self.buf.reserve_exact(used_cap, new_cap - used_cap);
+            let Ok(()) = self.buf.reserve_exact(used_cap, new_cap - used_cap);
             unsafe {
                 self.handle_cap_increase(old_cap);
             }
@@ -602,7 +603,7 @@ impl<T> VecDeque<T> {
     /// # process_data(&[1, 2, 3]).expect("why is the test harness OOMing on 12 bytes?");
     /// ```
     #[unstable(feature = "try_reserve", reason = "new API", issue="48043")]
-    pub fn try_reserve_exact(&mut self, additional: usize) -> Result<(), CollectionAllocErr> {
+    pub fn try_reserve_exact(&mut self, additional: usize) -> Result<(), CollectionAllocErr<AllocErr>> {
         self.try_reserve(additional)
     }
 
@@ -640,7 +641,7 @@ impl<T> VecDeque<T> {
     /// # process_data(&[1, 2, 3]).expect("why is the test harness OOMing on 12 bytes?");
     /// ```
     #[unstable(feature = "try_reserve", reason = "new API", issue="48043")]
-    pub fn try_reserve(&mut self, additional: usize) -> Result<(), CollectionAllocErr> {
+    pub fn try_reserve(&mut self, additional: usize) -> Result<(), CollectionAllocErr<AllocErr>> {
         let old_cap = self.cap();
         let used_cap = self.len() + 1;
         let new_cap = used_cap.checked_add(additional)
@@ -757,7 +758,7 @@ impl<T> VecDeque<T> {
             debug_assert!(self.head < self.tail);
         }
 
-        self.buf.shrink_to_fit(target_cap);
+        let Ok(()) = self.buf.shrink_to_fit(target_cap);
 
         debug_assert!(self.head < self.cap());
         debug_assert!(self.tail < self.cap());
@@ -1877,7 +1878,7 @@ impl<T> VecDeque<T> {
     fn grow_if_necessary(&mut self) {
         if self.is_full() {
             let old_cap = self.cap();
-            self.buf.double();
+            let Ok(()) = self.buf.double();
             unsafe {
                 self.handle_cap_increase(old_cap);
             }
@@ -2528,7 +2529,7 @@ impl<T> From<Vec<T>> for VecDeque<T> {
         if !buf.cap().is_power_of_two() || (buf.cap() < (MINIMUM_CAPACITY + 1)) ||
            (buf.cap() == len) {
             let cap = cmp::max(buf.cap() + 1, MINIMUM_CAPACITY + 1).next_power_of_two();
-            buf.reserve_exact(len, cap - len);
+            let Ok(()) = buf.reserve_exact(len, cap - len);
         }
 
         VecDeque {
diff --git a/src/liballoc/lib.rs b/src/liballoc/lib.rs
index 63cf01a0facbc..d59fb547f7f6a 100644
--- a/src/liballoc/lib.rs
+++ b/src/liballoc/lib.rs
@@ -123,6 +123,9 @@
 #![feature(inclusive_range_methods)]
 #![feature(rustc_const_unstable)]
 #![feature(const_vec_new)]
+#![feature(never_type)]
+#![feature(crate_in_paths)]
+#![feature(exhaustive_patterns)]
 #![cfg_attr(not(test), feature(fn_traits, i128))]
 #![cfg_attr(test, feature(test))]
 
@@ -144,6 +147,7 @@ mod macros;
 
 // Heaps provided for low-level allocation strategies
 
 pub mod alloc;
+pub mod abort_adapter;
 
 #[unstable(feature = "futures_api",
            reason = "futures in libcore are unstable",
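The `let Ok(()) = ...` lines sprinkled through this diff rely on exactly the features enabled above: with `Err = !` the `Err` arm is uninhabited, so `exhaustive_patterns` accepts the single `Ok` pattern with no `unwrap` and no panic path in codegen. A standalone illustration:

```rust
#![feature(never_type, exhaustive_patterns)]

fn infallible(n: u32) -> Result<u32, !> {
    Ok(n + 1)
}

fn demo() {
    // Exhaustive: `Err(!)` can never exist, so no other arm is needed.
    let Ok(m) = infallible(41);
    assert_eq!(m, 42);
}
```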
diff --git a/src/liballoc/raw_vec.rs b/src/liballoc/raw_vec.rs
index 4f2686abf4515..dddbc51027120 100644
--- a/src/liballoc/raw_vec.rs
+++ b/src/liballoc/raw_vec.rs
@@ -17,7 +17,8 @@ use core::ops::Drop;
 use core::ptr::{self, NonNull, Unique};
 use core::slice;
 
-use alloc::{Alloc, Layout, Global, handle_alloc_error};
+use abort_adapter::AbortAdapter;
+use alloc::{Alloc, Layout, Global};
 use collections::CollectionAllocErr;
 use collections::CollectionAllocErr::*;
 use boxed::Box;
@@ -32,7 +33,6 @@ use boxed::Box;
 /// * Catches all overflows in capacity computations (promotes them to "capacity overflow" panics)
 /// * Guards against 32-bit systems allocating more than isize::MAX bytes
 /// * Guards against overflowing your length
-/// * Aborts on OOM
 /// * Avoids freeing Unique::empty()
 /// * Contains a ptr::Unique and thus endows the user with all related benefits
 ///
@@ -50,7 +50,7 @@ use boxed::Box;
 /// field. This allows zero-sized types to not be special-cased by consumers of
 /// this type.
 #[allow(missing_debug_implementations)]
-pub struct RawVec<T, A: Alloc = Global> {
+pub struct RawVec<T, A: Alloc = AbortAdapter<Global>> {
     ptr: Unique<T>,
     cap: usize,
     a: A,
@@ -76,23 +76,24 @@ impl<T, A: Alloc> RawVec<T, A> {
     /// Like `with_capacity` but parameterized over the choice of
     /// allocator for the returned RawVec.
     #[inline]
-    pub fn with_capacity_in(cap: usize, a: A) -> Self {
-        RawVec::allocate_in(cap, false, a)
+    pub fn with_capacity_in(cap: usize, a: A) -> Result<Self, A::Err> {
+        RawVec::allocate_in(cap, false, a).map_err(handle_overflow_error)
     }
 
     /// Like `with_capacity_zeroed` but parameterized over the choice
     /// of allocator for the returned RawVec.
     #[inline]
-    pub fn with_capacity_zeroed_in(cap: usize, a: A) -> Self {
-        RawVec::allocate_in(cap, true, a)
+    pub fn with_capacity_zeroed_in(cap: usize, a: A) -> Result<Self, A::Err> {
+        RawVec::allocate_in(cap, true, a).map_err(handle_overflow_error)
     }
 
-    fn allocate_in(cap: usize, zeroed: bool, mut a: A) -> Self {
+    fn allocate_in(cap: usize, zeroed: bool, mut a: A) -> Result<Self, CollectionAllocErr<A::Err>> {
         unsafe {
             let elem_size = mem::size_of::<T>();
 
-            let alloc_size = cap.checked_mul(elem_size).unwrap_or_else(|| capacity_overflow());
-            alloc_guard(alloc_size).unwrap_or_else(|_| capacity_overflow());
+            let alloc_size = cap.checked_mul(elem_size)
+                .ok_or(CollectionAllocErr::CapacityOverflow)?;
+            alloc_guard(alloc_size)?;
 
             // handles ZSTs and `cap = 0` alike
             let ptr = if alloc_size == 0 {
@@ -100,34 +101,30 @@ impl<T, A: Alloc> RawVec<T, A> {
             } else {
                 let align = mem::align_of::<T>();
                 let layout = Layout::from_size_align(alloc_size, align).unwrap();
-                let result = if zeroed {
+                if zeroed {
                     a.alloc_zeroed(layout)
                 } else {
                     a.alloc(layout)
-                };
-                match result {
-                    Ok(ptr) => ptr.cast(),
-                    Err(_) => handle_alloc_error(layout),
-                }
+                }.map_err(CollectionAllocErr::AllocErr)?.cast()
             };
 
-            RawVec {
+            Ok(RawVec {
                 ptr: ptr.into(),
                 cap,
                 a,
-            }
+            })
         }
     }
 }
 
-impl<T> RawVec<T, Global> {
+impl<T> RawVec<T, AbortAdapter<Global>> {
     /// Creates the biggest possible RawVec (on the system heap)
     /// without allocating. If T has positive size, then this makes a
     /// RawVec with capacity 0. If T has 0 size, then it makes a
     /// RawVec with capacity `usize::MAX`. Useful for implementing
     /// delayed allocation.
     pub const fn new() -> Self {
-        Self::new_in(Global)
+        Self::new_in(AbortAdapter(Global))
     }
 
     /// Creates a RawVec (on the system heap) with exactly the
@@ -141,19 +138,19 @@ impl<T> RawVec<T, AbortAdapter<Global>> {
     /// * Panics if the requested capacity exceeds `usize::MAX` bytes.
     /// * Panics on 32-bit platforms if the requested capacity exceeds
     ///   `isize::MAX` bytes.
-    ///
-    /// # Aborts
-    ///
-    /// Aborts on OOM
     #[inline]
     pub fn with_capacity(cap: usize) -> Self {
-        RawVec::allocate_in(cap, false, Global)
+        let Ok(v) = RawVec::allocate_in(cap, false, Default::default())
+            .map_err(handle_overflow_error);
+        v
     }
 
     /// Like `with_capacity` but guarantees the buffer is zeroed.
     #[inline]
     pub fn with_capacity_zeroed(cap: usize) -> Self {
-        RawVec::allocate_in(cap, true, Global)
+        let Ok(v) = RawVec::allocate_in(cap, true, Default::default())
+            .map_err(handle_overflow_error);
+        v
     }
 }
 
@@ -174,7 +171,7 @@ impl<T, A: Alloc> RawVec<T, A> {
     }
 }
 
-impl<T> RawVec<T, Global> {
+impl<T, A: Alloc + Default> RawVec<T, A> {
     /// Reconstitutes a RawVec from a pointer, capacity.
     ///
     /// # Undefined Behavior
@@ -186,7 +183,7 @@ impl<T, A: Alloc + Default> RawVec<T, A> {
         RawVec {
             ptr: Unique::new_unchecked(ptr),
             cap,
-            a: Global,
+            a: Default::default(),
         }
     }
 
@@ -260,10 +257,6 @@ impl<T, A: Alloc> RawVec<T, A> {
     /// * Panics on 32-bit platforms if the requested capacity exceeds
     ///   `isize::MAX` bytes.
     ///
-    /// # Aborts
-    ///
-    /// Aborts on OOM
-    ///
     /// # Examples
     ///
     /// ```
@@ -294,7 +287,7 @@ impl<T, A: Alloc> RawVec<T, A> {
     /// ```
     #[inline(never)]
     #[cold]
-    pub fn double(&mut self) {
+    pub fn double(&mut self) -> Result<(), A::Err> {
         unsafe {
             let elem_size = mem::size_of::<T>();
 
@@ -316,29 +309,23 @@ impl<T, A: Alloc> RawVec<T, A> {
                     // `from_size_align_unchecked`.
                     let new_cap = 2 * self.cap;
                     let new_size = new_cap * elem_size;
-                    alloc_guard(new_size).unwrap_or_else(|_| capacity_overflow());
-                    let ptr_res = self.a.realloc(NonNull::from(self.ptr).cast(),
-                                                 cur,
-                                                 new_size);
-                    match ptr_res {
-                        Ok(ptr) => (new_cap, ptr.cast().into()),
-                        Err(_) => handle_alloc_error(
-                            Layout::from_size_align_unchecked(new_size, cur.align())
-                        ),
-                    }
+                    alloc_guard::<A::Err>(new_size).unwrap_or_else(|_| capacity_overflow());
+                    let ptr = self.a.realloc(NonNull::from(self.ptr).cast(),
+                                             cur,
+                                             new_size)?;
+                    (new_cap, ptr.cast().into())
                 }
                 None => {
                     // skip to 4 because tiny Vec's are dumb; but not if that
                     // would cause overflow
                     let new_cap = if elem_size > (!0) / 8 { 1 } else { 4 };
-                    match self.a.alloc_array::<T>(new_cap) {
-                        Ok(ptr) => (new_cap, ptr.into()),
-                        Err(_) => handle_alloc_error(Layout::array::<T>(new_cap).unwrap()),
-                    }
+                    let ptr = self.a.alloc_array::<T>(new_cap)?;
+                    (new_cap, ptr.cast().into())
                 }
             };
             self.ptr = uniq;
             self.cap = new_cap;
+            Ok(())
         }
     }
 
@@ -377,7 +364,7 @@ impl<T, A: Alloc> RawVec<T, A> {
             // overflow and the alignment is sufficiently small.
             let new_cap = 2 * self.cap;
             let new_size = new_cap * elem_size;
-            alloc_guard(new_size).unwrap_or_else(|_| capacity_overflow());
+            alloc_guard::<A::Err>(new_size).unwrap_or_else(|_| capacity_overflow());
             match self.a.grow_in_place(NonNull::from(self.ptr).cast(), old_layout, new_size) {
                 Ok(_) => {
                     // We can't directly divide `size`.
@@ -391,13 +378,6 @@ impl<T, A: Alloc> RawVec<T, A> {
         }
     }
 
-    /// The same as `reserve_exact`, but returns on errors instead of panicking or aborting.
-    pub fn try_reserve_exact(&mut self, used_cap: usize, needed_extra_cap: usize)
-        -> Result<(), CollectionAllocErr> {
-
-        self.reserve_internal(used_cap, needed_extra_cap, Fallible, Exact)
-    }
-
     /// Ensures that the buffer contains at least enough space to hold
     /// `used_cap + needed_extra_cap` elements. If it doesn't already,
     /// will reallocate the minimum possible amount of memory necessary.
@@ -414,23 +394,15 @@ impl<T, A: Alloc> RawVec<T, A> {
     /// * Panics if the requested capacity exceeds `usize::MAX` bytes.
     /// * Panics on 32-bit platforms if the requested capacity exceeds
     ///   `isize::MAX` bytes.
-    ///
-    /// # Aborts
-    ///
-    /// Aborts on OOM
-    pub fn reserve_exact(&mut self, used_cap: usize, needed_extra_cap: usize) {
-        match self.reserve_internal(used_cap, needed_extra_cap, Infallible, Exact) {
-            Err(CapacityOverflow) => capacity_overflow(),
-            Err(AllocErr) => unreachable!(),
-            Ok(()) => { /* yay */ }
-        }
+    pub fn reserve_exact(&mut self, used_cap: usize, needed_extra_cap: usize) -> Result<(), A::Err> {
+        self.reserve_internal_2(used_cap, needed_extra_cap, Exact)
     }
 
     /// Calculates the buffer's new size given that it'll hold `used_cap +
     /// needed_extra_cap` elements. This logic is used in amortized reserve methods.
     /// Returns `(new_capacity, new_alloc_size)`.
     fn amortized_new_size(&self, used_cap: usize, needed_extra_cap: usize)
-        -> Result<usize, CollectionAllocErr> {
+        -> Result<usize, CollectionAllocErr<A::Err>> {
 
         // Nothing we can really do about these checks :(
         let required_cap = used_cap.checked_add(needed_extra_cap).ok_or(CapacityOverflow)?;
@@ -440,12 +412,6 @@ impl<T, A: Alloc> RawVec<T, A> {
         Ok(cmp::max(double_cap, required_cap))
     }
 
-    /// The same as `reserve`, but returns on errors instead of panicking or aborting.
-    pub fn try_reserve(&mut self, used_cap: usize, needed_extra_cap: usize)
-        -> Result<(), CollectionAllocErr> {
-        self.reserve_internal(used_cap, needed_extra_cap, Fallible, Amortized)
-    }
-
     /// Ensures that the buffer contains at least enough space to hold
     /// `used_cap + needed_extra_cap` elements. If it doesn't already have
     /// enough capacity, will reallocate enough space plus comfortable slack
@@ -464,10 +430,6 @@ impl<T, A: Alloc> RawVec<T, A> {
     /// * Panics on 32-bit platforms if the requested capacity exceeds
     ///   `isize::MAX` bytes.
     ///
-    /// # Aborts
-    ///
-    /// Aborts on OOM
-    ///
     /// # Examples
     ///
     /// ```
@@ -498,12 +460,8 @@ impl<T, A: Alloc> RawVec<T, A> {
     /// #   vector.push_all(&[1, 3, 5, 7, 9]);
     /// # }
     /// ```
-    pub fn reserve(&mut self, used_cap: usize, needed_extra_cap: usize) {
-        match self.reserve_internal(used_cap, needed_extra_cap, Infallible, Amortized) {
-            Err(CapacityOverflow) => capacity_overflow(),
-            Err(AllocErr) => unreachable!(),
-            Ok(()) => { /* yay */ }
-        }
+    pub fn reserve(&mut self, used_cap: usize, needed_extra_cap: usize) -> Result<(), A::Err> {
+        self.reserve_internal_2(used_cap, needed_extra_cap, Amortized)
     }
 
     /// Attempts to ensure that the buffer contains at least enough space to hold
     /// `used_cap + needed_extra_cap` elements. If it doesn't already have
@@ -549,7 +507,7 @@ impl<T, A: Alloc> RawVec<T, A> {
         let new_layout = Layout::new::<T>().repeat(new_cap).unwrap().0;
 
         // FIXME: may crash and burn on over-reserve
-        alloc_guard(new_layout.size()).unwrap_or_else(|_| capacity_overflow());
+        alloc_guard::<A::Err>(new_layout.size()).unwrap_or_else(|_| capacity_overflow());
         match self.a.grow_in_place(
             NonNull::from(self.ptr).cast(), old_layout, new_layout.size(),
         ) {
@@ -570,17 +528,13 @@ impl<T, A: Alloc> RawVec<T, A> {
     /// # Panics
     ///
     /// Panics if the given amount is *larger* than the current capacity.
-    ///
-    /// # Aborts
-    ///
-    /// Aborts on OOM.
-    pub fn shrink_to_fit(&mut self, amount: usize) {
+    pub fn shrink_to_fit(&mut self, amount: usize) -> Result<(), A::Err> {
        let elem_size = mem::size_of::<T>();
 
         // Set the `cap` because they might be about to promote to a `Box<[T]>`
         if elem_size == 0 {
             self.cap = amount;
-            return;
+            return Ok(());
         }
 
         // This check is my waterloo; it's the only thing Vec wouldn't have to do.
@@ -612,26 +566,40 @@ impl<T, A: Alloc> RawVec<T, A> {
                 let new_size = elem_size * amount;
                 let align = mem::align_of::<T>();
                 let old_layout = Layout::from_size_align_unchecked(old_size, align);
-                match self.a.realloc(NonNull::from(self.ptr).cast(),
-                                     old_layout,
-                                     new_size) {
-                    Ok(p) => self.ptr = p.cast().into(),
-                    Err(_) => handle_alloc_error(
-                        Layout::from_size_align_unchecked(new_size, align)
-                    ),
-                }
+                let ptr = self.a.realloc(NonNull::from(self.ptr).cast(),
+                                         old_layout,
+                                         new_size)?;
+                self.ptr = ptr.cast().into();
             }
             self.cap = amount;
         }
+        Ok(())
     }
-}
 
-enum Fallibility {
-    Fallible,
-    Infallible,
+    // Reborrow a `RawVec` as one which does diverge on allocation failures.
+    pub fn as_infallible(&mut self) -> &mut RawVec<T, AbortAdapter<A>> {
+        unsafe { ::core::mem::transmute(self) }
+    }
 }
 
-use self::Fallibility::*;
+impl<T, A: Alloc> RawVec<T, AbortAdapter<A>> {
+    // Reborrow a `RawVec` as one which doesn't diverge on allocation failures.
+    pub fn as_fallible(&mut self) -> &mut RawVec<T, A> {
+        unsafe { ::core::mem::transmute(self) }
+    }
+
+    /// The same as `reserve_exact`, but returns on errors instead of panicking or aborting.
+    pub fn try_reserve_exact(&mut self, used_cap: usize, needed_extra_cap: usize)
+        -> Result<(), CollectionAllocErr<A::Err>> {
+        self.as_fallible().reserve_internal(used_cap, needed_extra_cap, Exact)
+    }
+
+    /// The same as `reserve`, but returns on errors instead of panicking or aborting.
+    pub fn try_reserve(&mut self, used_cap: usize, needed_extra_cap: usize)
+        -> Result<(), CollectionAllocErr<A::Err>> {
+        self.as_fallible().reserve_internal(used_cap, needed_extra_cap, Amortized)
+    }
+}
 
 enum ReserveStrategy {
     Exact,
@@ -645,12 +613,9 @@ impl<T, A: Alloc> RawVec<T, A> {
         &mut self,
         used_cap: usize,
         needed_extra_cap: usize,
-        fallibility: Fallibility,
         strategy: ReserveStrategy,
-    ) -> Result<(), CollectionAllocErr> {
+    ) -> Result<(), CollectionAllocErr<A::Err>> {
         unsafe {
-            use alloc::AllocErr;
-
             // NOTE: we don't early branch on ZSTs here because we want this
             // to actually catch "asking for more than usize::MAX" in that case.
             // If we make it past the first branch then we are guaranteed to
@@ -671,30 +636,38 @@ impl<T, A: Alloc> RawVec<T, A> {
 
             alloc_guard(new_layout.size())?;
 
-            let res = match self.current_layout() {
+            let ptr = match self.current_layout() {
                 Some(layout) => {
                     debug_assert!(new_layout.align() == layout.align());
                     self.a.realloc(NonNull::from(self.ptr).cast(), layout, new_layout.size())
                 }
                 None => self.a.alloc(new_layout),
-            };
-
-            match (&res, fallibility) {
-                (Err(AllocErr), Infallible) => handle_alloc_error(new_layout),
-                _ => {}
-            }
+            }.map_err(CollectionAllocErr::AllocErr)?;
 
-            self.ptr = res?.cast().into();
+            self.ptr = ptr.cast().into();
             self.cap = new_cap;
 
             Ok(())
         }
     }
+
+    /// Like the above, but throws away capacity overflow errors
+    fn reserve_internal_2(
+        &mut self,
+        used_cap: usize,
+        needed_extra_cap: usize,
+        strategy: ReserveStrategy,
+    ) -> Result<(), A::Err> {
+        match self.reserve_internal(used_cap, needed_extra_cap, strategy) {
+            Err(CapacityOverflow) => capacity_overflow(),
+            Err(CollectionAllocErr::AllocErr(e)) => Err(e),
+            Ok(()) => Ok(()),
+        }
+    }
 }
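A hedged sketch of how the two views are meant to be combined inside liballoc (`RawVec` is crate-internal, so this is pseudocode against this patch, not a public API):

```rust
// Inside liballoc, with `buf: RawVec<T, AbortAdapter<Global>>`:
//
//     let Ok(()) = buf.reserve(len, 1);   // Err = !, cannot fail visibly
//     buf.try_reserve(len, 1)?;           // yields CollectionAllocErr<AllocErr>
//
// `as_fallible`/`as_infallible` transmute between the two `RawVec` types,
// which is sound only because `AbortAdapter<A>` is a newtype over `A`
// with an identical layout.
```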
(see description of type for details) - pub unsafe fn into_box(self) -> Box<[T]> { + pub unsafe fn into_box(self) -> Box<[T], A> { // NOTE: not calling `cap()` here, actually using the real `cap` field! let slice = slice::from_raw_parts_mut(self.ptr(), self.cap); - let output: Box<[T]> = Box::from_raw(slice); + let a = ptr::read(&self.a); + let output: Box<[T], A> = Box::from_raw_in(slice, a); mem::forget(self); output } @@ -742,7 +716,7 @@ unsafe impl<#[may_dangle] T, A: Alloc> Drop for RawVec { // all 4GB in user-space. e.g. PAE or x32 #[inline] -fn alloc_guard(alloc_size: usize) -> Result<(), CollectionAllocErr> { +fn alloc_guard(alloc_size: usize) -> Result<(), CollectionAllocErr> { if mem::size_of::() < 8 && alloc_size > ::core::isize::MAX as usize { Err(CapacityOverflow) } else { @@ -750,6 +724,15 @@ fn alloc_guard(alloc_size: usize) -> Result<(), CollectionAllocErr> { } } +/// Reduce error to just allocation error +#[inline] +fn handle_overflow_error(err: CollectionAllocErr) -> E { + match err { + CollectionAllocErr::AllocErr(e) => e, + CollectionAllocErr::CapacityOverflow => capacity_overflow(), + } +} + // One central function responsible for reporting capacity overflows. This'll // ensure that the code generation related to these panics is minimal as there's // only one location which panics rather than a bunch throughout the module. diff --git a/src/liballoc/rc.rs b/src/liballoc/rc.rs index d76acb28df92b..a26410e649934 100644 --- a/src/liballoc/rc.rs +++ b/src/liballoc/rc.rs @@ -13,7 +13,7 @@ //! Single-threaded reference-counting pointers. 'Rc' stands for 'Reference //! Counted'. //! -//! The type [`Rc`][`Rc`] provides shared ownership of a value of type `T`, +//! The type [`Rc`][`Rc`] provides shared ownership of a value of type `T`, //! allocated in the heap. Invoking [`clone`][clone] on [`Rc`] produces a new //! pointer to the same value in the heap. When the last [`Rc`] pointer to a //! given value is destroyed, the pointed-to value is also destroyed. @@ -41,9 +41,9 @@ //! [`Rc`] pointers from parent nodes to children, and [`Weak`] pointers from //! children back to their parents. //! -//! `Rc` automatically dereferences to `T` (via the [`Deref`] trait), -//! so you can call `T`'s methods on a value of type [`Rc`][`Rc`]. To avoid name -//! clashes with `T`'s methods, the methods of [`Rc`][`Rc`] itself are [associated +//! `Rc` automatically dereferences to `T` (via the [`Deref`] trait), +//! so you can call `T`'s methods on a value of type [`Rc`][`Rc`]. To avoid name +//! clashes with `T`'s methods, the methods of [`Rc`][`Rc`] itself are [associated //! functions][assoc], called using function-like syntax: //! //! ``` @@ -53,13 +53,13 @@ //! Rc::downgrade(&my_rc); //! ``` //! -//! [`Weak`][`Weak`] does not auto-dereference to `T`, because the value may have +//! [`Weak`][`Weak`] does not auto-dereference to `T`, because the value may have //! already been destroyed. //! //! # Cloning references //! //! Creating a new reference from an existing reference counted pointer is done using the -//! `Clone` trait implemented for [`Rc`][`Rc`] and [`Weak`][`Weak`]. +//! `Clone` trait implemented for [`Rc`][`Rc`] and [`Weak`][`Weak`]. //! //! ``` //! 
use std::rc::Rc; @@ -256,16 +256,18 @@ use core::marker::{Unsize, PhantomData}; use core::mem::{self, align_of_val, forget, size_of_val}; use core::ops::Deref; use core::ops::CoerceUnsized; -use core::ptr::{self, NonNull}; +use core::ptr::{self, NonNull, Unique}; use core::convert::From; -use alloc::{Global, Alloc, Layout, box_free, handle_alloc_error}; +use abort_adapter::AbortAdapter; +use alloc::{Global, Alloc, Layout, box_free_worker}; use string::String; use vec::Vec; -struct RcBox { +struct RcBox> { strong: Cell, weak: Cell, + alloc: A, value: T, } @@ -281,18 +283,18 @@ struct RcBox { /// /// [get_mut]: #method.get_mut #[stable(feature = "rust1", since = "1.0.0")] -pub struct Rc { - ptr: NonNull>, +pub struct Rc> { + ptr: NonNull>, phantom: PhantomData, } #[stable(feature = "rust1", since = "1.0.0")] -impl !marker::Send for Rc {} +impl !marker::Send for Rc {} #[stable(feature = "rust1", since = "1.0.0")] -impl !marker::Sync for Rc {} +impl !marker::Sync for Rc {} #[unstable(feature = "coerce_unsized", issue = "27732")] -impl, U: ?Sized> CoerceUnsized> for Rc {} +impl, U: ?Sized, A: Alloc> CoerceUnsized> for Rc {} impl Rc { /// Constructs a new `Rc`. @@ -304,20 +306,31 @@ impl Rc { /// /// let five = Rc::new(5); /// ``` + #[inline] #[stable(feature = "rust1", since = "1.0.0")] pub fn new(value: T) -> Rc { - Rc { - // there is an implicit weak pointer owned by all the strong - // pointers, which ensures that the weak destructor never frees - // the allocation while the strong destructor is running, even - // if the weak pointer is stored inside the strong one. - ptr: Box::into_raw_non_null(box RcBox { - strong: Cell::new(1), - weak: Cell::new(1), - value, - }), - phantom: PhantomData, - } + let Ok(a) = Self::new_in(value, Default::default()); + a + } +} + +impl Rc { + #[unstable(feature = "allocator_api", issue = "32838")] + pub fn new_in(value: T, alloc: A) -> Result, A::Err> { + // there is an implicit weak pointer owned by all the strong + // pointers, which ensures that the weak destructor never frees + // the allocation while the strong destructor is running, even + // if the weak pointer is stored inside the strong one. + let x: Box<_, A> = Box::new_in(RcBox { + strong: Cell::new(1), + weak: Cell::new(1), + alloc: unsafe { mem::uninitialized() }, + value, + }, alloc)?; + let (unique, mut alloc_after) = Box::into_both(x); + mem::swap(unsafe { &mut (*unique.as_ptr()).alloc }, &mut alloc_after); + mem::forget(alloc_after); + Ok(Rc { ptr: NonNull::from(unique), phantom: PhantomData }) } /// Returns the contained value, if the `Rc` has exactly one strong reference. @@ -364,29 +377,6 @@ impl Rc { } impl Rc { - /// Consumes the `Rc`, returning the wrapped pointer. - /// - /// To avoid a memory leak the pointer must be converted back to an `Rc` using - /// [`Rc::from_raw`][from_raw]. - /// - /// [from_raw]: struct.Rc.html#method.from_raw - /// - /// # Examples - /// - /// ``` - /// use std::rc::Rc; - /// - /// let x = Rc::new(10); - /// let x_ptr = Rc::into_raw(x); - /// assert_eq!(unsafe { *x_ptr }, 10); - /// ``` - #[stable(feature = "rc_raw", since = "1.17.0")] - pub fn into_raw(this: Self) -> *const T { - let ptr: *const T = &*this; - mem::forget(this); - ptr - } - /// Constructs an `Rc` from a raw pointer. 
/// /// The raw pointer must have been previously returned by a call to a @@ -417,14 +407,44 @@ impl Rc { /// ``` #[stable(feature = "rc_raw", since = "1.17.0")] pub unsafe fn from_raw(ptr: *const T) -> Self { + Self::from_raw_in(ptr) + } +} + +impl Rc { + /// Consumes the `Rc`, returning the wrapped pointer. + /// + /// To avoid a memory leak the pointer must be converted back to an `Rc` using + /// [`Rc::from_raw`][from_raw]. + /// + /// [from_raw]: struct.Rc.html#method.from_raw + /// + /// # Examples + /// + /// ``` + /// use std::rc::Rc; + /// + /// let x = Rc::new(10); + /// let x_ptr = Rc::into_raw(x); + /// assert_eq!(unsafe { *x_ptr }, 10); + /// ``` + #[stable(feature = "rc_raw", since = "1.17.0")] + pub fn into_raw(this: Self) -> *const T { + let ptr: *const T = &*this; + mem::forget(this); + ptr + } + + #[unstable(feature = "allocator_api", issue = "32838")] + pub unsafe fn from_raw_in(ptr: *const T) -> Self { // Align the unsized value to the end of the RcBox. // Because it is ?Sized, it will always be the last field in memory. let align = align_of_val(&*ptr); - let layout = Layout::new::>(); + let layout = Layout::new::>(); let offset = (layout.size() + layout.padding_needed_for(align)) as isize; // Reverse the offset to find the original RcBox. - let fake_ptr = ptr as *mut RcBox; + let fake_ptr = ptr as *mut RcBox; let rc_ptr = set_data_ptr(fake_ptr, (ptr as *mut u8).offset(-offset)); Rc { @@ -447,7 +467,7 @@ impl Rc { /// let weak_five = Rc::downgrade(&five); /// ``` #[stable(feature = "rc_weak", since = "1.4.0")] - pub fn downgrade(this: &Self) -> Weak { + pub fn downgrade(this: &Self) -> Weak { this.inc_weak(); Weak { ptr: this.ptr } } @@ -618,7 +638,7 @@ impl Rc { } } -impl Rc { +impl Rc { #[inline] #[stable(feature = "rc_downcast", since = "1.29.0")] /// Attempt to downcast the `Rc` to a concrete type. 
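Under this change, `Rc::from_raw` simply forwards to `from_raw_in`, which walks back from the value pointer over the `RcBox` header (the strong and weak counts plus the new `alloc` field). The round trip is unchanged for users; this compiles on today's `std`:

```rust
use std::rc::Rc;

fn main() {
    let rc = Rc::new(10);
    // `into_raw` leaks the box and hands out a pointer to the value field.
    let raw = Rc::into_raw(rc);
    // Safety: `raw` came from `into_raw` above and is reclaimed exactly
    // once; `from_raw` recomputes the offset back to the header.
    let rc = unsafe { Rc::from_raw(raw) };
    assert_eq!(*rc, 10);
}
```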
@@ -641,55 +661,86 @@ impl Rc { /// print_if_string(Rc::new(0i8)); /// } /// ``` - pub fn downcast(self) -> Result, Rc> { + pub fn downcast(self) -> Result, Rc> { if (*self).is::() { - let ptr = self.ptr.cast::>(); - forget(self); - Ok(Rc { ptr, phantom: PhantomData }) + // avoid the pointer arithmetic in from_raw + unsafe { + let raw: *const RcBox = self.ptr.as_ptr(); + forget(self); + Ok(Rc { + ptr: NonNull::new_unchecked(raw as *const RcBox as *mut _), + phantom: PhantomData, + }) + } } else { Err(self) } } } - impl Rc { + fn from_box(v: Box) -> Self { + let Ok(a) = Self::from_box_in(v, Default::default()); + a + } +} + +impl Rc { // Allocates an `RcBox` with sufficient space for an unsized value - unsafe fn allocate_for_ptr(ptr: *const T) -> *mut RcBox { + unsafe fn allocate_for_ptr(ptr: *const T, mut alloc: A) -> Result<*mut RcBox, A::Err> { // Create a fake RcBox to find allocation size and alignment - let fake_ptr = ptr as *mut RcBox; + let fake_ptr = ptr as *mut RcBox; let layout = Layout::for_value(&*fake_ptr); - let mem = Global.alloc(layout) - .unwrap_or_else(|_| handle_alloc_error(layout)); + let mem = alloc.alloc(layout)?; // Initialize the real RcBox - let inner = set_data_ptr(ptr as *mut T, mem.as_ptr() as *mut u8) as *mut RcBox; + let inner = set_data_ptr(ptr as *mut T, mem.as_ptr() as *mut u8) as *mut RcBox; - ptr::write(&mut (*inner).strong, Cell::new(1)); - ptr::write(&mut (*inner).weak, Cell::new(1)); + ptr::write(inner as *mut RcBox<(), A>, RcBox { + strong: Cell::new(1), + weak: Cell::new(1), + alloc, + value: (), + }); - inner + Ok(inner) } - fn from_box(v: Box) -> Rc { - unsafe { - let box_unique = Box::into_unique(v); - let bptr = box_unique.as_ptr(); + /// `v` must be heap-allocated + unsafe fn from_box_raw(box_unique: Unique, alloc: A) -> Result, A::Err> { + let bptr = box_unique.as_ptr(); + let value_size = size_of_val(&*bptr); + let ptr = Self::allocate_for_ptr(bptr, alloc)?; - let value_size = size_of_val(&*bptr); - let ptr = Self::allocate_for_ptr(bptr); + // Copy value as bytes + ptr::copy_nonoverlapping( + bptr as *const T as *const u8, + &mut (*ptr).value as *mut _ as *mut u8, + value_size); - // Copy value as bytes - ptr::copy_nonoverlapping( - bptr as *const T as *const u8, - &mut (*ptr).value as *mut _ as *mut u8, - value_size); + Ok(Rc { ptr: NonNull::new_unchecked(ptr), phantom: PhantomData }) + } + #[unstable(feature = "allocator_api", issue = "32838")] + pub fn from_box_in(v: Box, alloc: A) -> Result { + let (u, mut a) = Box::into_both(v); + unsafe { + let rc = Self::from_box_raw(u, alloc)?; // Free the allocation without dropping its contents - box_free(box_unique); + box_free_worker(u, &mut a); + Ok(rc) + } + } - Rc { ptr: NonNull::new_unchecked(ptr), phantom: PhantomData } + #[unstable(feature = "allocator_api", issue = "32838")] + pub fn from_box_in_same(v: Box) -> Result { + let (u, a) = Box::into_both(v); + unsafe { + let rc = Self::from_box_raw(u, a)?; + // Free the allocation without dropping its contents + box_free_worker(u, &mut (*rc.ptr.as_ptr()).alloc); + Ok(rc) } } } @@ -709,7 +760,7 @@ impl Rc<[T]> { // Unsafe because the caller must either take ownership or bind `T: Copy` unsafe fn copy_from_slice(v: &[T]) -> Rc<[T]> { let v_ptr = v as *const [T]; - let ptr = Self::allocate_for_ptr(v_ptr); + let Ok(ptr) = Self::allocate_for_ptr(v_ptr, Default::default()); ptr::copy_nonoverlapping( v.as_ptr(), @@ -752,7 +803,7 @@ impl RcFromSlice for Rc<[T]> { unsafe { let v_ptr = v as *const [T]; - let ptr = Self::allocate_for_ptr(v_ptr); + let 
Ok(ptr) = Self::allocate_for_ptr(v_ptr, Default::default()); let mem = ptr as *mut _ as *mut u8; let layout = Layout::for_value(&*ptr); @@ -788,7 +839,7 @@ impl RcFromSlice for Rc<[T]> { } #[stable(feature = "rust1", since = "1.0.0")] -impl Deref for Rc { +impl Deref for Rc { type Target = T; #[inline(always)] @@ -798,7 +849,7 @@ impl Deref for Rc { } #[stable(feature = "rust1", since = "1.0.0")] -unsafe impl<#[may_dangle] T: ?Sized> Drop for Rc { +unsafe impl<#[may_dangle] T: ?Sized, A: Alloc> Drop for Rc { /// Drops the `Rc`. /// /// This will decrement the strong reference count. If the strong reference @@ -1151,12 +1202,12 @@ impl From> for Rc<[T]> { /// [`Option`]: ../../std/option/enum.Option.html /// [`None`]: ../../std/option/enum.Option.html#variant.None #[stable(feature = "rc_weak", since = "1.4.0")] -pub struct Weak { +pub struct Weak> { // This is a `NonNull` to allow optimizing the size of this type in enums, // but it is not necessarily a valid pointer. // `Weak::new` sets this to a dangling pointer so that it doesn’t need // to allocate space on the heap. - ptr: NonNull>, + ptr: NonNull>, } #[stable(feature = "rc_weak", since = "1.4.0")] @@ -1184,8 +1235,16 @@ impl Weak { /// ``` #[stable(feature = "downgraded_weak", since = "1.10.0")] pub fn new() -> Weak { + Self::new_in() + } +} + +impl Weak { + /// The same as `new`, but separate for stability reasons + #[unstable(feature = "allocator_api", issue = "32838")] + pub fn new_in() -> Weak { Weak { - ptr: NonNull::dangling(), + ptr: NonNull::dangling() } } } @@ -1233,11 +1292,25 @@ impl Weak { Some(Rc { ptr: self.ptr, phantom: PhantomData }) } } +} + +impl Weak { + /// The same as `upgrade`, but separate for stability reasons + #[unstable(feature = "allocator_api", issue = "32838")] + pub fn upgrade_in(&self) -> Option> { + let inner = self.inner()?; + if inner.strong() == 0 { + None + } else { + inner.inc_strong(); + Some(Rc { ptr: self.ptr, phantom: PhantomData }) + } + } /// Return `None` when the pointer is dangling and there is no allocated `RcBox`, /// i.e. this `Weak` was created by `Weak::new` #[inline] - fn inner(&self) -> Option<&RcBox> { + fn inner(&self) -> Option<&RcBox> { if is_dangling(self.ptr) { None } else { @@ -1247,7 +1320,7 @@ impl Weak { } #[stable(feature = "rc_weak", since = "1.4.0")] -impl Drop for Weak { +impl Drop for Weak { /// Drops the `Weak` pointer. /// /// # Examples @@ -1346,8 +1419,8 @@ impl Default for Weak { // clone these much in Rust thanks to ownership and move-semantics. 
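`upgrade_in` duplicates `upgrade` for the allocator-parameterized case purely for stability reasons; the observable semantics are the stable ones, shown here against today's `std`:

```rust
use std::rc::Rc;

fn main() {
    let strong = Rc::new(5);
    let weak = Rc::downgrade(&strong);
    // While a strong reference is alive, the upgrade succeeds...
    assert_eq!(*weak.upgrade().unwrap(), 5);
    drop(strong);
    // ...and once the strong count hits zero it returns `None`.
    assert!(weak.upgrade().is_none());
}
```

`Weak::new` still allocates nothing: `inner()` returns `None` for the dangling sentinel pointer, so `upgrade` fails without ever touching an allocator.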
 #[doc(hidden)]
-trait RcBoxPtr<T: ?Sized> {
-    fn inner(&self) -> &RcBox<T>;
+trait RcBoxPtr<T: ?Sized, A: Alloc> {
+    fn inner(&self) -> &RcBox<T, A>;
 
     #[inline]
     fn strong(&self) -> usize {
@@ -1380,18 +1453,18 @@ trait RcBoxPtr<T: ?Sized> {
     }
 }
 
-impl<T: ?Sized> RcBoxPtr<T> for Rc<T> {
+impl<T: ?Sized, A: Alloc> RcBoxPtr<T, A> for Rc<T, A> {
     #[inline(always)]
-    fn inner(&self) -> &RcBox<T> {
+    fn inner(&self) -> &RcBox<T, A> {
         unsafe { self.ptr.as_ref() }
     }
 }
 
-impl<T: ?Sized> RcBoxPtr<T> for RcBox<T> {
+impl<T: ?Sized, A: Alloc> RcBoxPtr<T, A> for RcBox<T, A> {
     #[inline(always)]
-    fn inner(&self) -> &RcBox<T> {
+    fn inner(&self) -> &RcBox<T, A> {
         self
     }
 }
diff --git a/src/liballoc/str.rs b/src/liballoc/str.rs
index 4d6434c378e82..a40cf7c8e57d1 100644
--- a/src/liballoc/str.rs
+++ b/src/liballoc/str.rs
@@ -47,6 +47,7 @@ use core::ptr;
 use core::iter::FusedIterator;
 use core::unicode::conversions;
 
+use alloc::{Alloc, AllocHelper};
 use borrow::{Borrow, ToOwned};
 use boxed::Box;
 use slice::{SliceConcatExt, SliceIndex};
@@ -605,6 +606,10 @@ impl str {
 /// ```
 #[stable(feature = "str_box_extras", since = "1.20.0")]
 #[inline]
-pub unsafe fn from_boxed_utf8_unchecked(v: Box<[u8]>) -> Box<str> {
-    Box::from_raw(Box::into_raw(v) as *mut str)
+pub unsafe fn from_boxed_utf8_unchecked
+    <A: Alloc>
+    (v: Box<[u8], A>) -> Box<str, A>
+{
+    let (u, a) = Box::into_both(v);
+    Box::from_raw_in(u.as_ptr() as *mut str, a)
 }
diff --git a/src/liballoc/string.rs b/src/liballoc/string.rs
index 6b28687a060de..198c22f75f279 100644
--- a/src/liballoc/string.rs
+++ b/src/liballoc/string.rs
@@ -66,6 +66,7 @@ use core::ptr;
 use core::str::pattern::Pattern;
 use core::str::lossy;
 
+use alloc::AllocErr;
 use collections::CollectionAllocErr;
 use borrow::{Cow, ToOwned};
 use boxed::Box;
@@ -952,7 +953,7 @@ impl String {
     /// # process_data("rust").expect("why is the test harness OOMing on 4 bytes?");
     /// ```
     #[unstable(feature = "try_reserve", reason = "new API", issue="48043")]
-    pub fn try_reserve(&mut self, additional: usize) -> Result<(), CollectionAllocErr> {
+    pub fn try_reserve(&mut self, additional: usize) -> Result<(), CollectionAllocErr<AllocErr>> {
         self.vec.try_reserve(additional)
     }
 
@@ -990,7 +991,7 @@ impl String {
     /// # process_data("rust").expect("why is the test harness OOMing on 4 bytes?");
     /// ```
     #[unstable(feature = "try_reserve", reason = "new API", issue="48043")]
-    pub fn try_reserve_exact(&mut self, additional: usize) -> Result<(), CollectionAllocErr> {
+    pub fn try_reserve_exact(&mut self, additional: usize) -> Result<(), CollectionAllocErr<AllocErr>> {
         self.vec.try_reserve_exact(additional)
     }
diff --git a/src/liballoc/sync.rs b/src/liballoc/sync.rs
index 5def0237e7e71..3340ed1b4c416 100644
--- a/src/liballoc/sync.rs
+++ b/src/liballoc/sync.rs
@@ -12,7 +12,7 @@
 //! Thread-safe reference-counting pointers.
 //!
-//! See the [`Arc<T>`][arc] documentation for more details.
+//! See the [`Arc<T, A>`][arc] documentation for more details.
 //!
 //! [arc]: struct.Arc.html
 
@@ -26,13 +26,14 @@ use core::intrinsics::abort;
 use core::mem::{self, align_of_val, size_of_val};
 use core::ops::Deref;
 use core::ops::CoerceUnsized;
-use core::ptr::{self, NonNull};
+use core::ptr::{self, NonNull, Unique};
 use core::marker::{Unsize, PhantomData};
 use core::hash::{Hash, Hasher};
 use core::{isize, usize};
 use core::convert::From;
 
-use alloc::{Global, Alloc, Layout, box_free, handle_alloc_error};
+use abort_adapter::AbortAdapter;
+use alloc::{Global, Alloc, Layout, box_free_worker};
 use boxed::Box;
 use rc::is_dangling;
 use string::String;
@@ -47,7 +48,7 @@ const MAX_REFCOUNT: usize = (isize::MAX) as usize;
 
 /// A thread-safe reference-counting pointer. 'Arc' stands for 'Atomically
 /// Reference Counted'.
/// -/// The type `Arc` provides shared ownership of a value of type `T`, +/// The type `Arc` provides shared ownership of a value of type `T`, /// allocated in the heap. Invoking [`clone`][clone] on `Arc` produces /// a new pointer to the same value in the heap. When the last `Arc` /// pointer to a given value is destroyed, the pointed-to value is @@ -67,21 +68,21 @@ const MAX_REFCOUNT: usize = (isize::MAX) as usize; /// are not sharing reference-counted values between threads, consider using /// [`Rc`] for lower overhead. [`Rc`] is a safe default, because the /// compiler will catch any attempt to send an [`Rc`] between threads. -/// However, a library might choose `Arc` in order to give library consumers +/// However, a library might choose `Arc` in order to give library consumers /// more flexibility. /// -/// `Arc` will implement [`Send`] and [`Sync`] as long as the `T` implements +/// `Arc` will implement [`Send`] and [`Sync`] as long as the `T` implements /// [`Send`] and [`Sync`]. Why can't you put a non-thread-safe type `T` in an -/// `Arc` to make it thread-safe? This may be a bit counter-intuitive at -/// first: after all, isn't the point of `Arc` thread safety? The key is -/// this: `Arc` makes it thread safe to have multiple ownership of the same +/// `Arc` to make it thread-safe? This may be a bit counter-intuitive at +/// first: after all, isn't the point of `Arc` thread safety? The key is +/// this: `Arc` makes it thread safe to have multiple ownership of the same /// data, but it doesn't add thread safety to its data. Consider -/// `Arc<`[`RefCell`]`>`. [`RefCell`] isn't [`Sync`], and if `Arc` was always +/// `Arc<`[`RefCell`]`>`. [`RefCell`] isn't [`Sync`], and if `Arc` was always /// [`Send`], `Arc<`[`RefCell`]`>` would be as well. But then we'd have a problem: /// [`RefCell`] is not thread safe; it keeps track of the borrowing count using /// non-atomic operations. /// -/// In the end, this means that you may need to pair `Arc` with some sort of +/// In the end, this means that you may need to pair `Arc` with some sort of /// [`std::sync`] type, usually [`Mutex`][mutex]. /// /// ## Breaking cycles with `Weak` @@ -99,7 +100,7 @@ const MAX_REFCOUNT: usize = (isize::MAX) as usize; /// # Cloning references /// /// Creating a new reference from an existing reference counted pointer is done using the -/// `Clone` trait implemented for [`Arc`][arc] and [`Weak`][weak]. +/// `Clone` trait implemented for [`Arc`][arc] and [`Weak`][weak]. /// /// ``` /// use std::sync::Arc; @@ -116,9 +117,9 @@ const MAX_REFCOUNT: usize = (isize::MAX) as usize; /// /// ## `Deref` behavior /// -/// `Arc` automatically dereferences to `T` (via the [`Deref`][deref] trait), -/// so you can call `T`'s methods on a value of type `Arc`. To avoid name -/// clashes with `T`'s methods, the methods of `Arc` itself are [associated +/// `Arc` automatically dereferences to `T` (via the [`Deref`][deref] trait), +/// so you can call `T`'s methods on a value of type `Arc`. 
To avoid name +/// clashes with `T`'s methods, the methods of `Arc` itself are [associated /// functions][assoc], called using function-like syntax: /// /// ``` @@ -198,18 +199,18 @@ const MAX_REFCOUNT: usize = (isize::MAX) as usize; /// /// [rc_examples]: ../../std/rc/index.html#examples #[stable(feature = "rust1", since = "1.0.0")] -pub struct Arc { - ptr: NonNull>, +pub struct Arc> { + ptr: NonNull>, phantom: PhantomData, } #[stable(feature = "rust1", since = "1.0.0")] -unsafe impl Send for Arc {} +unsafe impl Send for Arc {} #[stable(feature = "rust1", since = "1.0.0")] -unsafe impl Sync for Arc {} +unsafe impl Sync for Arc {} #[unstable(feature = "coerce_unsized", issue = "27732")] -impl, U: ?Sized> CoerceUnsized> for Arc {} +impl, U: ?Sized, A: Alloc> CoerceUnsized> for Arc {} /// `Weak` is a version of [`Arc`] that holds a non-owning reference to the /// managed value. The value is accessed by calling [`upgrade`] on the `Weak` @@ -235,30 +236,30 @@ impl, U: ?Sized> CoerceUnsized> for Arc {} /// [`Option`]: ../../std/option/enum.Option.html /// [`None`]: ../../std/option/enum.Option.html#variant.None #[stable(feature = "arc_weak", since = "1.4.0")] -pub struct Weak { +pub struct Weak> { // This is a `NonNull` to allow optimizing the size of this type in enums, // but it is not necessarily a valid pointer. // `Weak::new` sets this to a dangling pointer so that it doesn’t need // to allocate space on the heap. - ptr: NonNull>, + ptr: NonNull>, } #[stable(feature = "arc_weak", since = "1.4.0")] -unsafe impl Send for Weak {} +unsafe impl Send for Weak {} #[stable(feature = "arc_weak", since = "1.4.0")] -unsafe impl Sync for Weak {} +unsafe impl Sync for Weak {} #[unstable(feature = "coerce_unsized", issue = "27732")] -impl, U: ?Sized> CoerceUnsized> for Weak {} +impl, U: ?Sized, A: Alloc> CoerceUnsized> for Weak {} #[stable(feature = "arc_weak", since = "1.4.0")] -impl fmt::Debug for Weak { +impl fmt::Debug for Weak { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "(Weak)") } } -struct ArcInner { +struct ArcInner> { strong: atomic::AtomicUsize, // the value usize::MAX acts as a sentinel for temporarily "locking" the @@ -266,14 +267,16 @@ struct ArcInner { // to avoid races in `make_mut` and `get_mut`. weak: atomic::AtomicUsize, + alloc: A, + data: T, } -unsafe impl Send for ArcInner {} -unsafe impl Sync for ArcInner {} +unsafe impl Send for ArcInner {} +unsafe impl Sync for ArcInner {} impl Arc { - /// Constructs a new `Arc`. + /// Constructs a new `Arc`. 
/// /// # Examples /// @@ -285,14 +288,26 @@ impl Arc { #[inline] #[stable(feature = "rust1", since = "1.0.0")] pub fn new(data: T) -> Arc { + let Ok(a) = Self::new_in(data, Default::default()); + a + } +} + +impl Arc { + #[unstable(feature = "allocator_api", issue = "32838")] + pub fn new_in(data: T, alloc: A) -> Result, A::Err> { // Start the weak pointer count as 1 which is the weak pointer that's // held by all the strong pointers (kinda), see std/rc.rs for more info - let x: Box<_> = box ArcInner { + let x: Box<_, A> = Box::new_in(ArcInner { strong: atomic::AtomicUsize::new(1), weak: atomic::AtomicUsize::new(1), + alloc: unsafe { mem::uninitialized() }, data, - }; - Arc { ptr: Box::into_raw_non_null(x), phantom: PhantomData } + }, alloc)?; + let (unique, mut alloc_after) = Box::into_both(x); + mem::swap(unsafe { &mut (*unique.as_ptr()).alloc }, &mut alloc_after); + mem::forget(alloc_after); + Ok(Arc { ptr: From::from(unique), phantom: PhantomData }) } /// Returns the contained value, if the `Arc` has exactly one strong reference. @@ -339,29 +354,6 @@ impl Arc { } impl Arc { - /// Consumes the `Arc`, returning the wrapped pointer. - /// - /// To avoid a memory leak the pointer must be converted back to an `Arc` using - /// [`Arc::from_raw`][from_raw]. - /// - /// [from_raw]: struct.Arc.html#method.from_raw - /// - /// # Examples - /// - /// ``` - /// use std::sync::Arc; - /// - /// let x = Arc::new(10); - /// let x_ptr = Arc::into_raw(x); - /// assert_eq!(unsafe { *x_ptr }, 10); - /// ``` - #[stable(feature = "rc_raw", since = "1.17.0")] - pub fn into_raw(this: Self) -> *const T { - let ptr: *const T = &*this; - mem::forget(this); - ptr - } - /// Constructs an `Arc` from a raw pointer. /// /// The raw pointer must have been previously returned by a call to a @@ -392,14 +384,44 @@ impl Arc { /// ``` #[stable(feature = "rc_raw", since = "1.17.0")] pub unsafe fn from_raw(ptr: *const T) -> Self { + Self::from_raw_in(ptr) + } +} + +impl Arc { + /// Consumes the `Arc`, returning the wrapped pointer. + /// + /// To avoid a memory leak the pointer must be converted back to an `Arc` using + /// [`Arc::from_raw`][from_raw]. + /// + /// [from_raw]: struct.Arc.html#method.from_raw + /// + /// # Examples + /// + /// ``` + /// use std::sync::Arc; + /// + /// let x = Arc::new(10); + /// let x_ptr = Arc::into_raw(x); + /// assert_eq!(unsafe { *x_ptr }, 10); + /// ``` + #[stable(feature = "rc_raw", since = "1.17.0")] + pub fn into_raw(this: Self) -> *const T { + let ptr: *const T = &*this; + mem::forget(this); + ptr + } + + #[unstable(feature = "allocator_api", issue = "32838")] + pub unsafe fn from_raw_in(ptr: *const T) -> Self { // Align the unsized value to the end of the ArcInner. // Because it is ?Sized, it will always be the last field in memory. let align = align_of_val(&*ptr); - let layout = Layout::new::>(); + let layout = Layout::new::>(); let offset = (layout.size() + layout.padding_needed_for(align)) as isize; // Reverse the offset to find the original ArcInner. - let fake_ptr = ptr as *mut ArcInner; + let fake_ptr = ptr as *mut ArcInner; let arc_ptr = set_data_ptr(fake_ptr, (ptr as *mut u8).offset(-offset)); Arc { @@ -422,7 +444,7 @@ impl Arc { /// let weak_five = Arc::downgrade(&five); /// ``` #[stable(feature = "arc_weak", since = "1.4.0")] - pub fn downgrade(this: &Self) -> Weak { + pub fn downgrade(this: &Self) -> Weak { // This Relaxed is OK because we're checking the value in the CAS // below. 
let mut cur = this.inner().weak.load(Relaxed); @@ -506,7 +528,7 @@ impl Arc { } #[inline] - fn inner(&self) -> &ArcInner { + fn inner(&self) -> &ArcInner { // This unsafety is ok because while this arc is alive we're guaranteed // that the inner pointer is valid. Furthermore, we know that the // `ArcInner` structure itself is `Sync` because the inner data is @@ -524,7 +546,15 @@ impl Arc { if self.inner().weak.fetch_sub(1, Release) == 1 { atomic::fence(Acquire); - Global.dealloc(self.ptr.cast(), Layout::for_value(self.ptr.as_ref())) + + // The logic here is a little tricky because we're using an Alloc + // stored in the data to free the container. We first move the Alloc + // out of the container and then free the container. The Alloc itself + // will be dropped when it goes out of scope in this function. + + let mut alloc = ptr::read(&(*self.ptr.as_ptr()).alloc); + + alloc.dealloc(self.ptr.cast(), Layout::for_value(self.ptr.as_ref())) } } @@ -551,43 +581,69 @@ impl Arc { } impl Arc { - // Allocates an `ArcInner` with sufficient space for an unsized value - unsafe fn allocate_for_ptr(ptr: *const T) -> *mut ArcInner { + fn from_box(v: Box) -> Arc { + let Ok(a) = Self::from_box_in(v, Default::default()); + a + } +} + +impl Arc { + // Allocates an `ArcInner` with sufficient space for an unsized value + unsafe fn allocate_for_ptr(ptr: *const T, mut alloc: A) -> Result<*mut ArcInner, A::Err> { // Create a fake ArcInner to find allocation size and alignment - let fake_ptr = ptr as *mut ArcInner; + let fake_ptr = ptr as *mut ArcInner; let layout = Layout::for_value(&*fake_ptr); - let mem = Global.alloc(layout) - .unwrap_or_else(|_| handle_alloc_error(layout)); + let mem = alloc.alloc(layout)?; // Initialize the real ArcInner - let inner = set_data_ptr(ptr as *mut T, mem.as_ptr() as *mut u8) as *mut ArcInner; + let inner = set_data_ptr(ptr as *mut T, mem.as_ptr() as *mut u8) as *mut ArcInner; - ptr::write(&mut (*inner).strong, atomic::AtomicUsize::new(1)); - ptr::write(&mut (*inner).weak, atomic::AtomicUsize::new(1)); + ptr::write(inner as *mut ArcInner<(), A>, ArcInner { + strong: atomic::AtomicUsize::new(1), + weak: atomic::AtomicUsize::new(1), + alloc, + data: (), + }); - inner + Ok(inner) } - fn from_box(v: Box) -> Arc { - unsafe { - let box_unique = Box::into_unique(v); - let bptr = box_unique.as_ptr(); + /// `v` must be heap-allocated + unsafe fn from_box_raw(box_unique: Unique, alloc: A) -> Result { + let bptr = box_unique.as_ptr(); + let value_size = size_of_val(&*bptr); + let ptr = Self::allocate_for_ptr(bptr, alloc)?; - let value_size = size_of_val(&*bptr); - let ptr = Self::allocate_for_ptr(bptr); + // Copy value as bytes + ptr::copy_nonoverlapping( + bptr as *const T as *const u8, + &mut (*ptr).data as *mut _ as *mut u8, + value_size); - // Copy value as bytes - ptr::copy_nonoverlapping( - bptr as *const T as *const u8, - &mut (*ptr).data as *mut _ as *mut u8, - value_size); + Ok(Arc { ptr: NonNull::new_unchecked(ptr), phantom: PhantomData }) + } + #[unstable(feature = "allocator_api", issue = "32838")] + pub fn from_box_in(v: Box, alloc: A) -> Result { + let (u, mut a) = Box::into_both(v); + unsafe { + let arc = Self::from_box_raw(u, alloc)?; // Free the allocation without dropping its contents - box_free(box_unique); + box_free_worker(u, &mut a); + Ok(arc) + } + } - Arc { ptr: NonNull::new_unchecked(ptr), phantom: PhantomData } + #[unstable(feature = "allocator_api", issue = "32838")] + pub fn from_box_in_same(v: Box) -> Result { + let (u, a) = Box::into_both(v); + unsafe { + let arc 
= Self::from_box_raw(u, a)?; + // Free the allocation without dropping its contents + box_free_worker(u, &mut (*arc.ptr.as_ptr()).alloc); + Ok(arc) } } } @@ -602,41 +658,49 @@ unsafe fn set_data_ptr(mut ptr: *mut T, data: *mut U) -> *mut T { } impl Arc<[T]> { - // Copy elements from slice into newly allocated Arc<[T]> + unsafe fn copy_from_slice(v: &[T]) -> Arc<[T]> { + let Ok(a) = Self::copy_from_slice_in(v, Default::default()); + a + } +} + +impl Arc<[T], A> { + // Copy elements from slice into newly allocated Arc<[T], A> // // Unsafe because the caller must either take ownership or bind `T: Copy` - unsafe fn copy_from_slice(v: &[T]) -> Arc<[T]> { + unsafe fn copy_from_slice_in(v: &[T], alloc: A) -> Result, A::Err> { let v_ptr = v as *const [T]; - let ptr = Self::allocate_for_ptr(v_ptr); + let ptr = Self::allocate_for_ptr(v_ptr, alloc)?; ptr::copy_nonoverlapping( v.as_ptr(), &mut (*ptr).data as *mut [T] as *mut T, v.len()); - Arc { ptr: NonNull::new_unchecked(ptr), phantom: PhantomData } + Ok(Arc { ptr: NonNull::new_unchecked(ptr), phantom: PhantomData }) } } // Specialization trait used for From<&[T]> -trait ArcFromSlice { - fn from_slice(slice: &[T]) -> Self; +trait ArcFromSlice: Sized { + fn from_slice(slice: &[T], alloc: A) -> Result; } -impl ArcFromSlice for Arc<[T]> { +impl ArcFromSlice for Arc<[T], A> { #[inline] - default fn from_slice(v: &[T]) -> Self { + default fn from_slice(v: &[T], alloc: A) -> Result { // Panic guard while cloning T elements. // In the event of a panic, elements that have been written // into the new ArcInner will be dropped, then the memory freed. - struct Guard { + struct Guard<'a, T, A: 'a + Alloc> { mem: NonNull, elems: *mut T, layout: Layout, n_elems: usize, + alloc: &'a mut A, } - impl Drop for Guard { + impl<'a, T, A: 'a + Alloc> Drop for Guard<'a, T, A> { fn drop(&mut self) { use core::slice::from_raw_parts_mut; @@ -644,14 +708,14 @@ impl ArcFromSlice for Arc<[T]> { let slice = from_raw_parts_mut(self.elems, self.n_elems); ptr::drop_in_place(slice); - Global.dealloc(self.mem.cast(), self.layout.clone()); + self.alloc.dealloc(self.mem.cast(), self.layout.clone()); } } } unsafe { let v_ptr = v as *const [T]; - let ptr = Self::allocate_for_ptr(v_ptr); + let ptr = Self::allocate_for_ptr(v_ptr, alloc)?; let mem = ptr as *mut _ as *mut u8; let layout = Layout::for_value(&*ptr); @@ -664,6 +728,7 @@ impl ArcFromSlice for Arc<[T]> { elems: elems, layout: layout, n_elems: 0, + alloc: &mut (*ptr).alloc, }; for (i, item) in v.iter().enumerate() { @@ -674,20 +739,20 @@ impl ArcFromSlice for Arc<[T]> { // All clear. Forget the guard so it doesn't free the new ArcInner. mem::forget(guard); - Arc { ptr: NonNull::new_unchecked(ptr), phantom: PhantomData } + Ok(Arc { ptr: NonNull::new_unchecked(ptr), phantom: PhantomData }) } } } -impl ArcFromSlice for Arc<[T]> { +impl ArcFromSlice for Arc<[T], A> { #[inline] - fn from_slice(v: &[T]) -> Self { - unsafe { Arc::copy_from_slice(v) } + fn from_slice(v: &[T], alloc: A) -> Result { + unsafe { Arc::copy_from_slice_in(v, alloc) } } } #[stable(feature = "rust1", since = "1.0.0")] -impl Clone for Arc { +impl Clone for Arc { /// Makes a clone of the `Arc` pointer. /// /// This creates another pointer to the same inner value, increasing the @@ -703,7 +768,7 @@ impl Clone for Arc { /// Arc::clone(&five); /// ``` #[inline] - fn clone(&self) -> Arc { + fn clone(&self) -> Arc { // Using a relaxed ordering is alright here, as knowledge of the // original reference prevents other threads from erroneously deleting // the object. 
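The `Guard` in `from_slice` above exists because `T::clone` may panic halfway through the copy. A stripped-down sketch of the same pattern (illustrative names; the patch's guard additionally frees the allocation through its stored `&mut A`):

```rust
use std::{ptr, slice};

// Drops the `n_elems` elements already written if construction unwinds.
struct PartialGuard<T> {
    elems: *mut T,
    n_elems: usize,
}

impl<T> Drop for PartialGuard<T> {
    fn drop(&mut self) {
        unsafe {
            // Safety: the first `n_elems` slots are initialized before
            // each counter increment.
            let written = slice::from_raw_parts_mut(self.elems, self.n_elems);
            ptr::drop_in_place(written);
        }
    }
}
```

On success the caller `mem::forget`s the guard so nothing is dropped twice.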
@@ -737,7 +802,7 @@ impl Clone for Arc { } #[stable(feature = "rust1", since = "1.0.0")] -impl Deref for Arc { +impl Deref for Arc { type Target = T; #[inline] @@ -831,7 +896,7 @@ impl Arc { } } -impl Arc { +impl Arc { /// Returns a mutable reference to the inner value, if there are /// no other `Arc` or [`Weak`][weak] pointers to the same value. /// @@ -905,7 +970,7 @@ impl Arc { } #[stable(feature = "rust1", since = "1.0.0")] -unsafe impl<#[may_dangle] T: ?Sized> Drop for Arc { +unsafe impl<#[may_dangle] T: ?Sized, A: Alloc> Drop for Arc { /// Drops the `Arc`. /// /// This will decrement the strong reference count. If the strong reference @@ -1032,13 +1097,21 @@ impl Weak { /// ``` #[stable(feature = "downgraded_weak", since = "1.10.0")] pub fn new() -> Weak { + Weak::new_in() + } +} + +impl Weak { + /// The same as `new`, but separate for stability reasons + #[unstable(feature = "allocator_api", issue = "32838")] + pub fn new_in() -> Weak { Weak { ptr: NonNull::dangling(), } } } -impl Weak { +impl Weak { /// Attempts to upgrade the `Weak` pointer to an [`Arc`], extending /// the lifetime of the value if successful. /// @@ -1066,7 +1139,7 @@ impl Weak { /// assert!(weak_five.upgrade().is_none()); /// ``` #[stable(feature = "arc_weak", since = "1.4.0")] - pub fn upgrade(&self) -> Option> { + pub fn upgrade(&self) -> Option> { // We use a CAS loop to increment the strong count instead of a // fetch_add because once the count hits 0 it must never be above 0. let inner = self.inner()?; @@ -1104,7 +1177,7 @@ impl Weak { /// Return `None` when the pointer is dangling and there is no allocated `ArcInner`, /// i.e. this `Weak` was created by `Weak::new` #[inline] - fn inner(&self) -> Option<&ArcInner> { + fn inner(&self) -> Option<&ArcInner> { if is_dangling(self.ptr) { None } else { @@ -1114,7 +1187,7 @@ impl Weak { } #[stable(feature = "arc_weak", since = "1.4.0")] -impl Clone for Weak { +impl Clone for Weak { /// Makes a clone of the `Weak` pointer that points to the same value. /// /// # Examples @@ -1127,7 +1200,7 @@ impl Clone for Weak { /// Weak::clone(&weak_five); /// ``` #[inline] - fn clone(&self) -> Weak { + fn clone(&self) -> Weak { let inner = if let Some(inner) = self.inner() { inner } else { @@ -1172,7 +1245,7 @@ impl Default for Weak { } #[stable(feature = "arc_weak", since = "1.4.0")] -impl Drop for Weak { +impl Drop for Weak { /// Drops the `Weak` pointer. /// /// # Examples @@ -1215,14 +1288,20 @@ impl Drop for Weak { if inner.weak.fetch_sub(1, Release) == 1 { atomic::fence(Acquire); unsafe { - Global.dealloc(self.ptr.cast(), Layout::for_value(self.ptr.as_ref())) + // The logic here is a little tricky because we're using an Alloc + // stored in the data to free the container. We first move the Alloc + // out of the container and then free the container. The Alloc itself + // will be dropped when it goes out of scope in this function. + + let mut alloc = ptr::read(&(*self.ptr.as_ptr()).alloc); + alloc.dealloc(self.ptr.cast(), Layout::for_value(&*self.ptr.as_ptr())) } } } } #[stable(feature = "rust1", since = "1.0.0")] -impl PartialEq for Arc { +impl PartialEq for Arc { /// Equality for two `Arc`s. /// /// Two `Arc`s are equal if their inner values are equal. 
@@ -1236,7 +1315,7 @@ impl PartialEq for Arc { /// /// assert!(five == Arc::new(5)); /// ``` - fn eq(&self, other: &Arc) -> bool { + fn eq(&self, other: &Arc) -> bool { *(*self) == *(*other) } @@ -1253,12 +1332,12 @@ impl PartialEq for Arc { /// /// assert!(five != Arc::new(6)); /// ``` - fn ne(&self, other: &Arc) -> bool { + fn ne(&self, other: &Arc) -> bool { *(*self) != *(*other) } } #[stable(feature = "rust1", since = "1.0.0")] -impl PartialOrd for Arc { +impl PartialOrd for Arc { /// Partial comparison for two `Arc`s. /// /// The two are compared by calling `partial_cmp()` on their inner values. @@ -1273,7 +1352,7 @@ impl PartialOrd for Arc { /// /// assert_eq!(Some(Ordering::Less), five.partial_cmp(&Arc::new(6))); /// ``` - fn partial_cmp(&self, other: &Arc) -> Option { + fn partial_cmp(&self, other: &Arc) -> Option { (**self).partial_cmp(&**other) } @@ -1290,7 +1369,7 @@ impl PartialOrd for Arc { /// /// assert!(five < Arc::new(6)); /// ``` - fn lt(&self, other: &Arc) -> bool { + fn lt(&self, other: &Arc) -> bool { *(*self) < *(*other) } @@ -1307,7 +1386,7 @@ impl PartialOrd for Arc { /// /// assert!(five <= Arc::new(5)); /// ``` - fn le(&self, other: &Arc) -> bool { + fn le(&self, other: &Arc) -> bool { *(*self) <= *(*other) } @@ -1324,7 +1403,7 @@ impl PartialOrd for Arc { /// /// assert!(five > Arc::new(4)); /// ``` - fn gt(&self, other: &Arc) -> bool { + fn gt(&self, other: &Arc) -> bool { *(*self) > *(*other) } @@ -1341,12 +1420,12 @@ impl PartialOrd for Arc { /// /// assert!(five >= Arc::new(5)); /// ``` - fn ge(&self, other: &Arc) -> bool { + fn ge(&self, other: &Arc) -> bool { *(*self) >= *(*other) } } #[stable(feature = "rust1", since = "1.0.0")] -impl Ord for Arc { +impl Ord for Arc { /// Comparison for two `Arc`s. /// /// The two are compared by calling `cmp()` on their inner values. @@ -1361,29 +1440,29 @@ impl Ord for Arc { /// /// assert_eq!(Ordering::Less, five.cmp(&Arc::new(6))); /// ``` - fn cmp(&self, other: &Arc) -> Ordering { + fn cmp(&self, other: &Arc) -> Ordering { (**self).cmp(&**other) } } #[stable(feature = "rust1", since = "1.0.0")] -impl Eq for Arc {} +impl Eq for Arc {} #[stable(feature = "rust1", since = "1.0.0")] -impl fmt::Display for Arc { +impl fmt::Display for Arc { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { fmt::Display::fmt(&**self, f) } } #[stable(feature = "rust1", since = "1.0.0")] -impl fmt::Debug for Arc { +impl fmt::Debug for Arc { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { fmt::Debug::fmt(&**self, f) } } #[stable(feature = "rust1", since = "1.0.0")] -impl fmt::Pointer for Arc { +impl fmt::Pointer for Arc { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { fmt::Pointer::fmt(&(&**self as *const T), f) } @@ -1391,7 +1470,7 @@ impl fmt::Pointer for Arc { #[stable(feature = "rust1", since = "1.0.0")] impl Default for Arc { - /// Creates a new `Arc`, with the `Default` value for `T`. + /// Creates a new `Arc`, with the `Default` value for `T`. 
/// /// # Examples /// @@ -1407,7 +1486,7 @@ impl Default for Arc { } #[stable(feature = "rust1", since = "1.0.0")] -impl Hash for Arc { +impl Hash for Arc { fn hash(&self, state: &mut H) { (**self).hash(state) } @@ -1424,7 +1503,8 @@ impl From for Arc { impl<'a, T: Clone> From<&'a [T]> for Arc<[T]> { #[inline] fn from(v: &[T]) -> Arc<[T]> { - >::from_slice(v) + let Ok(a) = >::from_slice(v, Default::default()); + a } } @@ -1739,7 +1819,7 @@ mod tests { assert_eq!(format!("{:?}", a), "5"); } - // Make sure deriving works with Arc + // Make sure deriving works with Arc #[derive(Eq, Ord, PartialEq, PartialOrd, Clone, Debug, Default)] struct Foo { inner: Arc, @@ -1925,14 +2005,14 @@ mod tests { } #[stable(feature = "rust1", since = "1.0.0")] -impl borrow::Borrow for Arc { +impl borrow::Borrow for Arc { fn borrow(&self) -> &T { &**self } } #[stable(since = "1.5.0", feature = "smart_ptr_as_ref")] -impl AsRef for Arc { +impl AsRef for Arc { fn as_ref(&self) -> &T { &**self } diff --git a/src/liballoc/vec.rs b/src/liballoc/vec.rs index 5efe1e23309a7..fe39f95d75c98 100644 --- a/src/liballoc/vec.rs +++ b/src/liballoc/vec.rs @@ -80,6 +80,7 @@ use core::ptr; use core::ptr::NonNull; use core::slice; +use alloc::AllocErr; use collections::CollectionAllocErr; use borrow::ToOwned; use borrow::Cow; @@ -461,7 +462,7 @@ impl Vec { /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn reserve(&mut self, additional: usize) { - self.buf.reserve(self.len, additional); + let Ok(()) = self.buf.reserve(self.len, additional); } /// Reserves the minimum capacity for exactly `additional` more elements to @@ -486,7 +487,7 @@ impl Vec { /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn reserve_exact(&mut self, additional: usize) { - self.buf.reserve_exact(self.len, additional); + let Ok(()) = self.buf.reserve_exact(self.len, additional); } /// Tries to reserve capacity for at least `additional` more elements to be inserted @@ -522,7 +523,7 @@ impl Vec { /// # process_data(&[1, 2, 3]).expect("why is the test harness OOMing on 12 bytes?"); /// ``` #[unstable(feature = "try_reserve", reason = "new API", issue="48043")] - pub fn try_reserve(&mut self, additional: usize) -> Result<(), CollectionAllocErr> { + pub fn try_reserve(&mut self, additional: usize) -> Result<(), CollectionAllocErr> { self.buf.try_reserve(self.len, additional) } @@ -562,7 +563,7 @@ impl Vec { /// # process_data(&[1, 2, 3]).expect("why is the test harness OOMing on 12 bytes?"); /// ``` #[unstable(feature = "try_reserve", reason = "new API", issue="48043")] - pub fn try_reserve_exact(&mut self, additional: usize) -> Result<(), CollectionAllocErr> { + pub fn try_reserve_exact(&mut self, additional: usize) -> Result<(), CollectionAllocErr> { self.buf.try_reserve_exact(self.len, additional) } @@ -583,7 +584,7 @@ impl Vec { #[stable(feature = "rust1", since = "1.0.0")] pub fn shrink_to_fit(&mut self) { if self.capacity() != self.len { - self.buf.shrink_to_fit(self.len); + let Ok(()) = self.buf.shrink_to_fit(self.len); } } @@ -609,7 +610,7 @@ impl Vec { /// ``` #[unstable(feature = "shrink_to", reason = "new API", issue="0")] pub fn shrink_to(&mut self, min_capacity: usize) { - self.buf.shrink_to_fit(cmp::max(self.len, min_capacity)); + let Ok(()) = self.buf.shrink_to_fit(cmp::max(self.len, min_capacity)); } /// Converts the vector into [`Box<[T]>`][owned slice]. 
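`Vec` keeps its stable, panicking signatures by instantiating `RawVec` with `AbortAdapter<Global>`, whose error type is `!`. `Result<T, !>` has no `Err` values, so the `let Ok(..) = ...` bindings above are irrefutable. A standalone nightly sketch of the same trick (requires the feature gates shown):

```rust
#![feature(never_type, exhaustive_patterns)]

// With `!` as the error type there is no failure path for the compiler
// to generate, so a plain `let` destructures the `Result` directly.
fn infallible() -> Result<u32, !> {
    Ok(7)
}

fn main() {
    let Ok(n) = infallible();
    assert_eq!(n, 7);
}
```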
@@ -2679,7 +2680,7 @@ impl<'a, T> Drain<'a, T> { unsafe fn move_tail(&mut self, extra_capacity: usize) { let vec = self.vec.as_mut(); let used_capacity = self.tail_start + self.tail_len; - vec.buf.reserve(used_capacity, extra_capacity); + let Ok(()) = vec.buf.reserve(used_capacity, extra_capacity); let new_tail_start = self.tail_start + extra_capacity; let src = vec.as_ptr().offset(self.tail_start as isize); diff --git a/src/libcore/alloc.rs b/src/libcore/alloc.rs index b6ac248b79f86..7a5cd6a7f601b 100644 --- a/src/libcore/alloc.rs +++ b/src/libcore/alloc.rs @@ -584,6 +584,17 @@ pub unsafe trait GlobalAlloc { } } +/// A hack so the default impl can condition on the associated type. This `Err` +/// type ought to just live in the `Alloc` trait. +#[unstable(feature = "allocator_api", issue = "32838")] +pub trait AllocHelper { + + /// The type of any errors thrown by the allocator, customarily + /// either `AllocErr`, for when error recovery is allowed, or `!` + /// to signify that all errors will result in . + type Err = AllocErr; +} + /// An implementation of `Alloc` can allocate, reallocate, and /// deallocate arbitrary blocks of data described via `Layout`. /// @@ -664,7 +675,7 @@ pub unsafe trait GlobalAlloc { /// Note that this list may get tweaked over time as clarifications are made in /// the future. #[unstable(feature = "allocator_api", issue = "32838")] -pub unsafe trait Alloc { +pub unsafe trait Alloc: AllocHelper { // (Note: some existing allocators have unspecified but well-defined // behavior in response to a zero size allocation request ; @@ -712,7 +723,7 @@ pub unsafe trait Alloc { /// rather than directly invoking `panic!` or similar. /// /// [`handle_alloc_error`]: ../../alloc/alloc/fn.handle_alloc_error.html - unsafe fn alloc(&mut self, layout: Layout) -> Result, AllocErr>; + unsafe fn alloc(&mut self, layout: Layout) -> Result, Self::Err>; /// Deallocate the memory referenced by `ptr`. /// @@ -825,7 +836,7 @@ pub unsafe trait Alloc { unsafe fn realloc(&mut self, ptr: NonNull, layout: Layout, - new_size: usize) -> Result, AllocErr> { + new_size: usize) -> Result, Self::Err> { let old_size = layout.size(); if new_size >= old_size { @@ -868,7 +879,7 @@ pub unsafe trait Alloc { /// rather than directly invoking `panic!` or similar. /// /// [`handle_alloc_error`]: ../../alloc/alloc/fn.handle_alloc_error.html - unsafe fn alloc_zeroed(&mut self, layout: Layout) -> Result, AllocErr> { + unsafe fn alloc_zeroed(&mut self, layout: Layout) -> Result, Self::Err> { let size = layout.size(); let p = self.alloc(layout); if let Ok(p) = p { @@ -896,7 +907,7 @@ pub unsafe trait Alloc { /// rather than directly invoking `panic!` or similar. /// /// [`handle_alloc_error`]: ../../alloc/alloc/fn.handle_alloc_error.html - unsafe fn alloc_excess(&mut self, layout: Layout) -> Result { + unsafe fn alloc_excess(&mut self, layout: Layout) -> Result { let usable_size = self.usable_size(&layout); self.alloc(layout).map(|p| Excess(p, usable_size.1)) } @@ -923,7 +934,7 @@ pub unsafe trait Alloc { unsafe fn realloc_excess(&mut self, ptr: NonNull, layout: Layout, - new_size: usize) -> Result { + new_size: usize) -> Result { let new_layout = Layout::from_size_align_unchecked(new_size, layout.align()); let usable_size = self.usable_size(&new_layout); self.realloc(ptr, layout, new_size) @@ -1069,16 +1080,8 @@ pub unsafe trait Alloc { /// rather than directly invoking `panic!` or similar. 
/// /// [`handle_alloc_error`]: ../../alloc/alloc/fn.handle_alloc_error.html - fn alloc_one(&mut self) -> Result, AllocErr> - where Self: Sized - { - let k = Layout::new::(); - if k.size() > 0 { - unsafe { self.alloc(k).map(|p| p.cast()) } - } else { - Err(AllocErr) - } - } + fn alloc_one(&mut self) -> Result, Self::Err> + where Self: Sized; /// Deallocates a block suitable for holding an instance of `T`. /// @@ -1138,18 +1141,8 @@ pub unsafe trait Alloc { /// rather than directly invoking `panic!` or similar. /// /// [`handle_alloc_error`]: ../../alloc/alloc/fn.handle_alloc_error.html - fn alloc_array(&mut self, n: usize) -> Result, AllocErr> - where Self: Sized - { - match Layout::array::(n) { - Ok(ref layout) if layout.size() > 0 => { - unsafe { - self.alloc(layout.clone()).map(|p| p.cast()) - } - } - _ => Err(AllocErr), - } - } + fn alloc_array(&mut self, n: usize) -> Result, Self::Err> + where Self: Sized; /// Reallocates a block previously suitable for holding `n_old` /// instances of `T`, returning a block suitable for holding @@ -1188,19 +1181,8 @@ pub unsafe trait Alloc { unsafe fn realloc_array(&mut self, ptr: NonNull, n_old: usize, - n_new: usize) -> Result, AllocErr> - where Self: Sized - { - match (Layout::array::(n_old), Layout::array::(n_new)) { - (Ok(ref k_old), Ok(ref k_new)) if k_old.size() > 0 && k_new.size() > 0 => { - debug_assert!(k_old.align() == k_new.align()); - self.realloc(ptr.cast(), k_old.clone(), k_new.size()).map(NonNull::cast) - } - _ => { - Err(AllocErr) - } - } - } + n_new: usize) -> Result, Self::Err> + where Self: Sized; /// Deallocates a block suitable for holding `n` instances of `T`. /// @@ -1222,8 +1204,38 @@ pub unsafe trait Alloc { /// constraints. /// /// Always returns `Err` on arithmetic overflow. - unsafe fn dealloc_array(&mut self, ptr: NonNull, n: usize) -> Result<(), AllocErr> - where Self: Sized + unsafe fn dealloc_array(&mut self, ptr: NonNull, n: usize) -> Result<(), Self::Err> + where Self: Sized; +} + +#[unstable(feature = "allocator_api", issue = "32838")] +default unsafe impl> Alloc for A { + fn alloc_one(&mut self) -> Result, Self::Err> + where Self: Sized + { + let k = Layout::new::(); + if k.size() > 0 { + unsafe { self.alloc(k).map(|p| p.cast()) } + } else { + Err(AllocErr) + } + } + + fn alloc_array(&mut self, n: usize) -> Result, Self::Err> + where Self: Sized + { + match Layout::array::(n) { + Ok(ref layout) if layout.size() > 0 => { + unsafe { + self.alloc(layout.clone()).map(|p| p.cast()) + } + } + _ => Err(AllocErr), + } + } + + unsafe fn dealloc_array(&mut self, ptr: NonNull, n: usize) -> Result<(), Self::Err> + where Self: Sized { match Layout::array::(n) { Ok(ref k) if k.size() > 0 => { @@ -1234,4 +1246,21 @@ pub unsafe trait Alloc { } } } + + unsafe fn realloc_array(&mut self, + ptr: NonNull, + n_old: usize, + n_new: usize) -> Result, Self::Err> + where Self: Sized + { + match (Layout::array::(n_old), Layout::array::(n_new)) { + (Ok(ref k_old), Ok(ref k_new)) if k_old.size() > 0 && k_new.size() > 0 => { + debug_assert!(k_old.align() == k_new.align()); + self.realloc(ptr.cast(), k_old.clone(), k_new.size()).map(NonNull::cast) + } + _ => { + Err(AllocErr) + } + } + } } diff --git a/src/test/mir-opt/validate_2.rs b/src/test/mir-opt/validate_2.rs index 3776a11b3ab82..46f0aaad47ce0 100644 --- a/src/test/mir-opt/validate_2.rs +++ b/src/test/mir-opt/validate_2.rs @@ -22,14 +22,14 @@ fn main() { // fn main() -> () { // ... 
// bb1: { -// Validate(Acquire, [_2: std::boxed::Box<[i32; 3]>]); -// Validate(Release, [_2: std::boxed::Box<[i32; 3]>]); -// _1 = move _2 as std::boxed::Box<[i32]> (Unsize); -// Validate(Acquire, [_1: std::boxed::Box<[i32]>]); +// Validate(Acquire, [_2: std::boxed::Box<[i32; 3], std::alloc::Global>]); +// Validate(Release, [_2: std::boxed::Box<[i32; 3], std::alloc::Global>]); +// _1 = move _2 as std::boxed::Box<[i32], std::alloc::Global> (Unsize); +// Validate(Acquire, [_1: std::boxed::Box<[i32], std::alloc::Global>]); // StorageDead(_2); // StorageDead(_3); // _0 = (); -// Validate(Release, [_1: std::boxed::Box<[i32]>]); +// Validate(Release, [_1: std::boxed::Box<[i32], std::alloc::Global>]); // drop(_1) -> [return: bb2, unwind: bb3]; // } // ... diff --git a/src/test/ui/e0119/conflict-with-std.stderr b/src/test/ui/e0119/conflict-with-std.stderr index e8b2c84c0df0b..72db200637f6a 100644 --- a/src/test/ui/e0119/conflict-with-std.stderr +++ b/src/test/ui/e0119/conflict-with-std.stderr @@ -5,8 +5,8 @@ LL | impl AsRef for Box { //~ ERROR conflicting implementations | ^^^^^^^^^^^^^^^^^^^^^^^^ | = note: conflicting implementation in crate `alloc`: - - impl std::convert::AsRef for std::boxed::Box - where T: ?Sized; + - impl std::convert::AsRef for std::boxed::Box + where A: std::alloc::Alloc, T: ?Sized; error[E0119]: conflicting implementations of trait `std::convert::From` for type `S`: --> $DIR/conflict-with-std.rs:24:1 diff --git a/src/test/ui/issue-14092.rs b/src/test/ui/issue-14092.rs index 449de26769ff3..f4cd32611a549 100644 --- a/src/test/ui/issue-14092.rs +++ b/src/test/ui/issue-14092.rs @@ -9,6 +9,6 @@ // except according to those terms. fn fn1(0: Box) {} - //~^ ERROR wrong number of type arguments: expected 1, found 0 [E0243] + //~^ ERROR wrong number of type arguments: expected at least 1, found 0 [E0243] fn main() {} diff --git a/src/test/ui/issue-14092.stderr b/src/test/ui/issue-14092.stderr index f90ea4776ab7c..c1d2ca1af1d6a 100644 --- a/src/test/ui/issue-14092.stderr +++ b/src/test/ui/issue-14092.stderr @@ -1,8 +1,8 @@ -error[E0243]: wrong number of type arguments: expected 1, found 0 +error[E0243]: wrong number of type arguments: expected at least 1, found 0 --> $DIR/issue-14092.rs:11:11 | LL | fn fn1(0: Box) {} - | ^^^ expected 1 type argument + | ^^^ expected at least 1 type argument error: aborting due to previous error diff --git a/src/test/ui/issue-41974.stderr b/src/test/ui/issue-41974.stderr index eca40ed43557d..ae32be16befe0 100644 --- a/src/test/ui/issue-41974.stderr +++ b/src/test/ui/issue-41974.stderr @@ -1,13 +1,13 @@ -error[E0119]: conflicting implementations of trait `std::ops::Drop` for type `std::boxed::Box<_>`: +error[E0119]: conflicting implementations of trait `std::ops::Drop` for type `std::boxed::Box<_, _>`: --> $DIR/issue-41974.rs:17:1 | LL | impl Drop for T where T: A { //~ ERROR E0119 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ | = note: conflicting implementation in crate `alloc`: - - impl std::ops::Drop for std::boxed::Box - where T: ?Sized; - = note: downstream crates may implement trait `A` for type `std::boxed::Box<_>` + - impl std::ops::Drop for std::boxed::Box + where A: std::alloc::Alloc, T: ?Sized; + = note: downstream crates may implement trait `A` for type `std::boxed::Box<_, _>` error[E0120]: the Drop trait may only be implemented on structures --> $DIR/issue-41974.rs:17:18 diff --git a/src/test/ui/nll/ty-outlives/projection-no-regions-closure.stderr b/src/test/ui/nll/ty-outlives/projection-no-regions-closure.stderr index e07051135779a..022970d3b9cbe 
100644 --- a/src/test/ui/nll/ty-outlives/projection-no-regions-closure.stderr +++ b/src/test/ui/nll/ty-outlives/projection-no-regions-closure.stderr @@ -20,7 +20,7 @@ LL | with_signature(x, |mut y| Box::new(y.next())) '_#1r, T, i32, - extern "rust-call" fn((std::boxed::Box,)) -> std::boxed::Box<(dyn Anything + '_#2r)> + extern "rust-call" fn((std::boxed::Box,)) -> std::boxed::Box<(dyn Anything + '_#2r), std::alloc::Global> ] = note: number of external vids: 3 = note: where ::Item: '_#2r @@ -60,7 +60,7 @@ LL | with_signature(x, |mut y| Box::new(y.next())) '_#1r, T, i32, - extern "rust-call" fn((std::boxed::Box,)) -> std::boxed::Box<(dyn Anything + '_#2r)> + extern "rust-call" fn((std::boxed::Box,)) -> std::boxed::Box<(dyn Anything + '_#2r), std::alloc::Global> ] = note: number of external vids: 3 = note: where ::Item: '_#2r @@ -92,7 +92,7 @@ LL | with_signature(x, |mut y| Box::new(y.next())) '_#2r, T, i32, - extern "rust-call" fn((std::boxed::Box,)) -> std::boxed::Box<(dyn Anything + '_#3r)> + extern "rust-call" fn((std::boxed::Box,)) -> std::boxed::Box<(dyn Anything + '_#3r), std::alloc::Global> ] = note: number of external vids: 4 = note: where ::Item: '_#3r @@ -134,7 +134,7 @@ LL | with_signature(x, |mut y| Box::new(y.next())) '_#2r, T, i32, - extern "rust-call" fn((std::boxed::Box,)) -> std::boxed::Box<(dyn Anything + '_#3r)> + extern "rust-call" fn((std::boxed::Box,)) -> std::boxed::Box<(dyn Anything + '_#3r), std::alloc::Global> ] = note: number of external vids: 4 = note: where ::Item: '_#3r diff --git a/src/test/ui/nll/ty-outlives/ty-param-closure-outlives-from-return-type.stderr b/src/test/ui/nll/ty-outlives/ty-param-closure-outlives-from-return-type.stderr index 39ad96cc6cd8e..29e3db8336d20 100644 --- a/src/test/ui/nll/ty-outlives/ty-param-closure-outlives-from-return-type.stderr +++ b/src/test/ui/nll/ty-outlives/ty-param-closure-outlives-from-return-type.stderr @@ -20,7 +20,7 @@ LL | with_signature(x, |y| y) '_#1r, T, i32, - extern "rust-call" fn((std::boxed::Box,)) -> std::boxed::Box<(dyn std::fmt::Debug + '_#2r)> + extern "rust-call" fn((std::boxed::Box,)) -> std::boxed::Box<(dyn std::fmt::Debug + '_#2r), std::alloc::Global> ] = note: number of external vids: 3 = note: where T: '_#2r
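Taken together, the patch gives every owning container two construction paths. A sketch against the API as revised by this patch (nightly-only, and specific to these proposed signatures):

```rust
#![feature(allocator_api)]

use std::alloc::{AllocErr, Global};

fn demo() -> Result<(), AllocErr> {
    // Fallible: the caller picks the allocator and sees its error type.
    let five: Box<i32, Global> = Box::new_in(5, Global)?;
    assert_eq!(*five, 5);

    // Infallible: plain `Box::new` defaults to `AbortAdapter<Global>`,
    // whose `Err = !`, so the stable signature is unchanged.
    let six = Box::new(6);
    assert_eq!(*six, 6);
    Ok(())
}
```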