80 changes: 80 additions & 0 deletions library/alloc/src/boxed.rs
@@ -192,11 +192,15 @@ use core::fmt;
use core::future::Future;
use core::hash::{Hash, Hasher};
use core::marker::{Tuple, Unsize};
#[cfg(not(no_global_oom_handling))]
use core::mem::MaybeUninit;
use core::mem::{self, SizedTypeProperties};
use core::ops::{
AsyncFn, AsyncFnMut, AsyncFnOnce, CoerceUnsized, Coroutine, CoroutineState, Deref, DerefMut,
DerefPure, DispatchFromDyn, LegacyReceiver,
};
#[cfg(not(no_global_oom_handling))]
use core::ops::{Residual, Try};
use core::pin::{Pin, PinCoerceUnsized};
use core::ptr::{self, NonNull, Unique};
use core::task::{Context, Poll};
@@ -385,6 +389,82 @@ impl<T> Box<T> {
pub fn try_new_zeroed() -> Result<Box<mem::MaybeUninit<T>>, AllocError> {
Box::try_new_zeroed_in(Global)
}

/// Maps the value in a box, reusing the allocation if possible.
///
/// `f` is called on the value in the box, and the result is returned, also boxed.
///
/// Note: this is an associated function, which means that you have
/// to call it as `Box::map(b, f)` instead of `b.map(f)`. This
/// is so that there is no conflict with a method on the inner type.
///
/// # Examples
///
/// ```
/// #![feature(smart_pointer_try_map)]
///
/// let b = Box::new(7);
/// let new = Box::map(b, |i| i + 7);
/// assert_eq!(*new, 14);
/// ```
#[cfg(not(no_global_oom_handling))]
#[unstable(feature = "smart_pointer_try_map", issue = "144419")]
pub fn map<U>(this: Self, f: impl FnOnce(T) -> U) -> Box<U> {
if size_of::<T>() == size_of::<U>() && align_of::<T>() == align_of::<U>() {
let (value, allocation) = Box::take(this);
Box::write(
unsafe { mem::transmute::<Box<MaybeUninit<T>>, Box<MaybeUninit<U>>>(allocation) },
f(value),
)
} else {
Box::new(f(*this))
}
}
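
Since `Box::map` consumes the box, the closure receives the value itself; the existing allocation is reused only when `T` and `U` have the same size and alignment, and otherwise `Box::new` allocates fresh storage for the result. A minimal sketch of both paths on a nightly toolchain (the concrete types are illustrative):

```
#![feature(smart_pointer_try_map)]

fn main() {
    // i32 -> u32: identical size and alignment, so the original allocation is reused.
    let negative = Box::new(-1_i32);
    let bits: Box<u32> = Box::map(negative, |i| i as u32);
    assert_eq!(*bits, u32::MAX);

    // i32 -> i64: the layouts differ, so `Box::map` falls back to a new allocation.
    let widened: Box<i64> = Box::map(Box::new(7_i32), |i| i as i64);
    assert_eq!(*widened, 7);
}
```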

/// Attempts to map the value in a box, reusing the allocation if possible.
///
/// `f` is called on the value in the box, and if the operation succeeds, the result is
/// returned, also boxed.
///
/// Note: this is an associated function, which means that you have
/// to call it as `Box::try_map(b, f)` instead of `b.try_map(f)`. This
/// is so that there is no conflict with a method on the inner type.
///
/// # Examples
///
/// ```
/// #![feature(smart_pointer_try_map)]
///
/// let b = Box::new(7);
/// let new = Box::try_map(b, u32::try_from).unwrap();
/// assert_eq!(*new, 7);
/// ```
#[cfg(not(no_global_oom_handling))]
#[unstable(feature = "smart_pointer_try_map", issue = "144419")]
pub fn try_map<R>(
this: Self,
f: impl FnOnce(T) -> R,
) -> <R::Residual as Residual<Box<R::Output>>>::TryType
where
R: Try,
R::Residual: Residual<Box<R::Output>>,
{
if size_of::<T>() == size_of::<R::Output>() && align_of::<T>() == align_of::<R::Output>() {
let (value, allocation) = Box::take(this);
try {
Box::write(
unsafe {
mem::transmute::<Box<MaybeUninit<T>>, Box<MaybeUninit<R::Output>>>(
allocation,
)
},
f(value)?,
)
}
} else {
try { Box::new(f(*this)?) }
}
}
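
The return type of `try_map` is shaped by the `Try`/`Residual` bounds: for a closure returning `Result<U, E>` the call yields `Result<Box<U>, E>`, and for one returning `Option<U>` it yields `Option<Box<U>>`. A short sketch of the failing paths, assuming a nightly toolchain:

```
#![feature(smart_pointer_try_map)]

fn main() {
    // A `Result`-returning closure: the error is propagated and no box is produced.
    let negative = Box::new(-3_i32);
    let converted: Result<Box<u32>, _> = Box::try_map(negative, u32::try_from);
    assert!(converted.is_err());

    // An `Option`-returning closure: the result is an `Option<Box<_>>`.
    let odd = Box::new(7_i32);
    let halved: Option<Box<i32>> = Box::try_map(odd, |i| (i % 2 == 0).then(|| i / 2));
    assert!(halved.is_none());
}
```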
}

impl<T, A: Allocator> Box<T, A> {
2 changes: 2 additions & 0 deletions library/alloc/src/lib.rs
@@ -146,7 +146,9 @@
#![feature(trusted_fused)]
#![feature(trusted_len)]
#![feature(trusted_random_access)]
#![feature(try_blocks)]
#![feature(try_trait_v2)]
#![feature(try_trait_v2_residual)]
#![feature(try_with_capacity)]
#![feature(tuple_trait)]
#![feature(ub_checks)]
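
These three gates are what the new `map`/`try_map` methods lean on: `try_blocks` for the `try { ... }` expressions in the fallback paths, and `try_trait_v2` plus `try_trait_v2_residual` for the `Try`/`Residual` bounds that let `try_map` hand back the caller's own container shape. A rough sketch of the same bounds on a free function (the helper name is hypothetical, nightly only):

```
#![feature(try_blocks, try_trait_v2, try_trait_v2_residual)]

use std::ops::{Residual, Try};

// Hypothetical stand-in mirroring the `try_map` signatures: for `R = Result<U, E>` the
// return type resolves to `Result<Box<U>, E>`, for `R = Option<U>` to `Option<Box<U>>`.
fn box_try_map<T, R>(
    value: T,
    f: impl FnOnce(T) -> R,
) -> <R::Residual as Residual<Box<R::Output>>>::TryType
where
    R: Try,
    R::Residual: Residual<Box<R::Output>>,
{
    try { Box::new(f(value)?) }
}

fn main() {
    let ok: Result<Box<u32>, _> = box_try_map(7_i32, u32::try_from);
    assert_eq!(*ok.unwrap(), 7);
}
```
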
253 changes: 253 additions & 0 deletions library/alloc/src/rc.rs
@@ -255,6 +255,8 @@ use core::marker::{PhantomData, Unsize};
use core::mem::{self, ManuallyDrop, align_of_val_raw};
use core::num::NonZeroUsize;
use core::ops::{CoerceUnsized, Deref, DerefMut, DerefPure, DispatchFromDyn, LegacyReceiver};
#[cfg(not(no_global_oom_handling))]
use core::ops::{Residual, Try};
use core::panic::{RefUnwindSafe, UnwindSafe};
#[cfg(not(no_global_oom_handling))]
use core::pin::Pin;
@@ -639,6 +641,93 @@ impl<T> Rc<T> {
pub fn pin(value: T) -> Pin<Rc<T>> {
unsafe { Pin::new_unchecked(Rc::new(value)) }
}

/// Maps the value in an `Rc`, reusing the allocation if possible.
///
/// `f` is called on a reference to the value in the `Rc`, and the result is returned, also in
/// an `Rc`.
///
/// Note: this is an associated function, which means that you have
/// to call it as `Rc::map(r, f)` instead of `r.map(f)`. This
/// is so that there is no conflict with a method on the inner type.
///
/// # Examples
///
/// ```
/// #![feature(smart_pointer_try_map)]
///
/// use std::rc::Rc;
///
/// let r = Rc::new(7);
/// let new = Rc::map(r, |i| i + 7);
/// assert_eq!(*new, 14);
/// ```
#[cfg(not(no_global_oom_handling))]
#[unstable(feature = "smart_pointer_try_map", issue = "144419")]
pub fn map<U>(this: Self, f: impl FnOnce(&T) -> U) -> Rc<U> {
if size_of::<T>() == size_of::<U>()
&& align_of::<T>() == align_of::<U>()
&& Rc::is_unique(&this)
{
unsafe {
let ptr = Rc::into_raw(this);
let value = ptr.read();
let mut allocation = Rc::from_raw(ptr.cast::<mem::MaybeUninit<U>>());

Rc::get_mut_unchecked(&mut allocation).write(f(&value));
allocation.assume_init()
}
} else {
Rc::new(f(&*this))
}
}
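
Unlike `Box::map`, the closure only gets `&T`, and reusing the allocation additionally requires that the `Rc` is unique, i.e. there are no other strong or weak references; a shared `Rc` simply gets a fresh allocation for the result. A small nightly sketch with illustrative values:

```
#![feature(smart_pointer_try_map)]

use std::rc::Rc;

fn main() {
    // Unique: nothing else points at the allocation, so it can be reused in place.
    let unique = Rc::new(7_i32);
    let doubled = Rc::map(unique, |&i| i * 2);
    assert_eq!(*doubled, 14);

    // Shared: the clone keeps the old value alive, so `map` allocates a new `Rc`.
    let shared = Rc::new(String::from("hello"));
    let keep_alive = Rc::clone(&shared);
    let length = Rc::map(shared, |s| s.len());
    assert_eq!(*length, 5);
    assert_eq!(*keep_alive, "hello");
}
```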

/// Attempts to map the value in an `Rc`, reusing the allocation if possible.
///
/// `f` is called on a reference to the value in the `Rc`, and if the operation succeeds, the
/// result is returned, also in an `Rc`.
///
/// Note: this is an associated function, which means that you have
/// to call it as `Rc::try_map(r, f)` instead of `r.try_map(f)`. This
/// is so that there is no conflict with a method on the inner type.
///
/// # Examples
///
/// ```
/// #![feature(smart_pointer_try_map)]
///
/// use std::rc::Rc;
///
/// let b = Rc::new(7);
/// let new = Rc::try_map(b, |&i| u32::try_from(i)).unwrap();
/// assert_eq!(*new, 7);
/// ```
#[cfg(not(no_global_oom_handling))]
#[unstable(feature = "smart_pointer_try_map", issue = "144419")]
pub fn try_map<R>(
this: Self,
f: impl FnOnce(&T) -> R,
) -> <R::Residual as Residual<Rc<R::Output>>>::TryType
where
R: Try,
R::Residual: Residual<Rc<R::Output>>,
{
if size_of::<T>() == size_of::<R::Output>()
&& align_of::<T>() == align_of::<R::Output>()
&& Rc::is_unique(&this)
{
unsafe {
let ptr = Rc::into_raw(this);
let value = ptr.read();
let mut allocation = Rc::from_raw(ptr.cast::<mem::MaybeUninit<R::Output>>());

Rc::get_mut_unchecked(&mut allocation).write(f(&value)?);
try { allocation.assume_init() }
}
} else {
try { Rc::new(f(&*this)?) }
}
}
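
The fallible variant applies the same layout and uniqueness checks; when the closure fails, the residual (an `Err` or `None`) is returned as-is. A brief nightly sketch using an `Option`-returning closure:

```
#![feature(smart_pointer_try_map)]

use std::rc::Rc;

fn main() {
    let text = Rc::new(String::from("42"));
    let parsed = Rc::try_map(text, |s| i32::from_str_radix(s, 10).ok());
    assert_eq!(*parsed.unwrap(), 42);

    let garbage = Rc::new(String::from("not a number"));
    assert!(Rc::try_map(garbage, |s| i32::from_str_radix(s, 10).ok()).is_none());
}
```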
}

impl<T, A: Allocator> Rc<T, A> {
@@ -3991,6 +4080,128 @@ impl<T> UniqueRc<T> {
pub fn new(value: T) -> Self {
Self::new_in(value, Global)
}

/// Maps the value in a `UniqueRc`, reusing the allocation if possible.
///
/// `f` is called on the value in the `UniqueRc`, and the result is returned, also in a
/// `UniqueRc`.
///
/// Note: this is an associated function, which means that you have
/// to call it as `UniqueRc::map(u, f)` instead of `u.map(f)`. This
/// is so that there is no conflict with a method on the inner type.
///
/// # Examples
///
/// ```
/// #![feature(smart_pointer_try_map)]
/// #![feature(unique_rc_arc)]
///
/// use std::rc::UniqueRc;
///
/// let r = UniqueRc::new(7);
/// let new = UniqueRc::map(r, |i| i + 7);
/// assert_eq!(*new, 14);
/// ```
#[cfg(not(no_global_oom_handling))]
#[unstable(feature = "smart_pointer_try_map", issue = "144419")]
pub fn map<U>(this: Self, f: impl FnOnce(T) -> U) -> UniqueRc<U> {
if size_of::<T>() == size_of::<U>()
&& align_of::<T>() == align_of::<U>()
&& UniqueRc::weak_count(&this) == 0
{
unsafe {
let ptr = UniqueRc::into_raw(this);
let value = ptr.read();
let mut allocation = UniqueRc::from_raw(ptr.cast::<mem::MaybeUninit<U>>());

allocation.write(f(value));
allocation.assume_init()
}
} else {
UniqueRc::new(f(UniqueRc::unwrap(this)))
}
}
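
Because a `UniqueRc` is the only strong reference, `f` receives the value itself rather than a reference; the allocation is reused when the layouts match and no `Weak`s have been handed out yet (`weak_count == 0`). A small sketch, nightly only since both features are unstable (it also assumes the existing `UniqueRc::downgrade` to create the weak reference):

```
#![feature(smart_pointer_try_map)]
#![feature(unique_rc_arc)]

use std::rc::UniqueRc;

fn main() {
    // No outstanding weak references and identical layout: the allocation is reused.
    let fresh = UniqueRc::new(7_i32);
    let negated = UniqueRc::map(fresh, |i| -i);
    assert_eq!(*negated, -7);

    // An outstanding `Weak` forces the fallback path onto a fresh allocation.
    let observed = UniqueRc::new(7_i32);
    let _weak = UniqueRc::downgrade(&observed);
    let doubled = UniqueRc::map(observed, |i| i * 2);
    assert_eq!(*doubled, 14);
}
```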

/// Attempts to map the value in a `UniqueRc`, reusing the allocation if possible.
///
/// `f` is called on the value in the `UniqueRc`, and if the operation succeeds, the result is
/// returned, also in a `UniqueRc`.
///
/// Note: this is an associated function, which means that you have
/// to call it as `UniqueRc::try_map(u, f)` instead of `u.try_map(f)`. This
/// is so that there is no conflict with a method on the inner type.
///
/// # Examples
///
/// ```
/// #![feature(smart_pointer_try_map)]
/// #![feature(unique_rc_arc)]
///
/// use std::rc::UniqueRc;
///
/// let b = UniqueRc::new(7);
/// let new = UniqueRc::try_map(b, u32::try_from).unwrap();
/// assert_eq!(*new, 7);
/// ```
#[cfg(not(no_global_oom_handling))]
#[unstable(feature = "smart_pointer_try_map", issue = "144419")]
pub fn try_map<R>(
this: Self,
f: impl FnOnce(T) -> R,
) -> <R::Residual as Residual<UniqueRc<R::Output>>>::TryType
where
R: Try,
R::Residual: Residual<UniqueRc<R::Output>>,
{
if size_of::<T>() == size_of::<R::Output>()
&& align_of::<T>() == align_of::<R::Output>()
&& UniqueRc::weak_count(&this) == 0
{
unsafe {
let ptr = UniqueRc::into_raw(this);
let value = ptr.read();
let mut allocation = UniqueRc::from_raw(ptr.cast::<mem::MaybeUninit<R::Output>>());

allocation.write(f(value)?);
try { allocation.assume_init() }
}
} else {
try { UniqueRc::new(f(UniqueRc::unwrap(this))?) }
}
}
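
The fallible variant follows the same pattern; note that on failure the value has already been moved into `f`, so only the residual comes back. A minimal failing-path sketch (nightly):

```
#![feature(smart_pointer_try_map)]
#![feature(unique_rc_arc)]

use std::rc::UniqueRc;

fn main() {
    let negative = UniqueRc::new(-3_i32);
    let converted: Result<UniqueRc<u32>, _> = UniqueRc::try_map(negative, u32::try_from);
    assert!(converted.is_err());
}
```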

#[cfg(not(no_global_oom_handling))]
fn unwrap(this: Self) -> T {
let this = ManuallyDrop::new(this);
let val: T = unsafe { ptr::read(&**this) };

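        // Dropping this `Weak` releases the `UniqueRc`'s own implicit weak reference, so the
        // allocation can be freed once no other `Weak`s remain, without running `T`'s
        // destructor (the value was already moved out above).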
let _weak = Weak { ptr: this.ptr, alloc: Global };

val
}
}

impl<T: ?Sized> UniqueRc<T> {
#[cfg(not(no_global_oom_handling))]
unsafe fn from_raw(ptr: *const T) -> Self {
let offset = unsafe { data_offset(ptr) };

// Reverse the offset to find the original RcInner.
let rc_ptr = unsafe { ptr.byte_sub(offset) as *mut RcInner<T> };

Self {
ptr: unsafe { NonNull::new_unchecked(rc_ptr) },
_marker: PhantomData,
_marker2: PhantomData,
alloc: Global,
}
}

#[cfg(not(no_global_oom_handling))]
fn into_raw(this: Self) -> *const T {
let this = ManuallyDrop::new(this);
Self::as_ptr(&*this)
}
}

impl<T, A: Allocator> UniqueRc<T, A> {
@@ -4041,6 +4252,40 @@ impl<T: ?Sized, A: Allocator> UniqueRc<T, A> {
Rc::from_inner_in(this.ptr, alloc)
}
}

#[cfg(not(no_global_oom_handling))]
fn weak_count(this: &Self) -> usize {
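        // The `UniqueRc` itself holds one implicit weak reference; report only the others.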
this.inner().weak() - 1
}

#[cfg(not(no_global_oom_handling))]
fn inner(&self) -> &RcInner<T> {
// SAFETY: while this UniqueRc is alive we're guaranteed that the inner pointer is valid.
unsafe { self.ptr.as_ref() }
}

#[cfg(not(no_global_oom_handling))]
fn as_ptr(this: &Self) -> *const T {
let ptr: *mut RcInner<T> = NonNull::as_ptr(this.ptr);

// SAFETY: This cannot go through Deref::deref or UniqueRc::inner because
// this is required to retain raw/mut provenance such that e.g. `get_mut` can
// write through the pointer after the Rc is recovered through `from_raw`.
unsafe { &raw mut (*ptr).value }
}

#[inline]
#[cfg(not(no_global_oom_handling))]
fn into_inner_with_allocator(this: Self) -> (NonNull<RcInner<T>>, A) {
let this = mem::ManuallyDrop::new(this);
(this.ptr, unsafe { ptr::read(&this.alloc) })
}

#[inline]
#[cfg(not(no_global_oom_handling))]
unsafe fn from_inner_in(ptr: NonNull<RcInner<T>>, alloc: A) -> Self {
Self { ptr, _marker: PhantomData, _marker2: PhantomData, alloc }
}
}

impl<T: ?Sized, A: Allocator + Clone> UniqueRc<T, A> {
@@ -4059,6 +4304,14 @@ impl<T: ?Sized, A: Allocator + Clone> UniqueRc<T, A> {
}
}

#[cfg(not(no_global_oom_handling))]
impl<T, A: Allocator> UniqueRc<mem::MaybeUninit<T>, A> {
unsafe fn assume_init(self) -> UniqueRc<T, A> {
let (ptr, alloc) = UniqueRc::into_inner_with_allocator(self);
unsafe { UniqueRc::from_inner_in(ptr.cast(), alloc) }
}
}

#[unstable(feature = "unique_rc_arc", issue = "112566")]
impl<T: ?Sized, A: Allocator> Deref for UniqueRc<T, A> {
type Target = T;