Commit

Remove duplicate impl
Matthew Giordano committed Sep 6, 2024
1 parent 7b61eea commit a9cf084
Showing 2 changed files with 6 additions and 88 deletions.
41 changes: 3 additions & 38 deletions alloc/src/rc.rs
@@ -460,42 +460,7 @@ impl<T> Rc<T> {
     where
         F: FnOnce(&Weak<T>) -> T,
     {
-        // Construct the inner in the "uninitialized" state with a single
-        // weak reference.
-        let uninit_ptr: NonNull<_> = Box::leak(Box::new(RcBox {
-            strong: Cell::new(0),
-            weak: Cell::new(1),
-            value: mem::MaybeUninit::<T>::uninit(),
-        }))
-        .into();
-
-        let init_ptr: NonNull<RcBox<T>> = uninit_ptr.cast();
-
-        let weak = Weak { ptr: init_ptr, alloc: Global };
-
-        // It's important we don't give up ownership of the weak pointer, or
-        // else the memory might be freed by the time `data_fn` returns. If
-        // we really wanted to pass ownership, we could create an additional
-        // weak pointer for ourselves, but this would result in additional
-        // updates to the weak reference count which might not be necessary
-        // otherwise.
-        let data = data_fn(&weak);
-
-        let strong = unsafe {
-            let inner = init_ptr.as_ptr();
-            ptr::write(ptr::addr_of_mut!((*inner).value), data);
-
-            let prev_value = (*inner).strong.get();
-            debug_assert_eq!(prev_value, 0, "No prior strong references should exist");
-            (*inner).strong.set(1);
-
-            Rc::from_inner(init_ptr)
-        };
-
-        // Strong references should collectively own a shared weak reference,
-        // so don't run the destructor for our old weak reference.
-        mem::forget(weak);
-        strong
+        Self::new_cyclic_in(data_fn, Global)
     }

     /// Constructs a new `Rc` with uninitialized contents.
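
For context (not part of this commit): the `new_cyclic` contract that both the old inline body and the new delegation preserve is that `data_fn` receives a `Weak` which cannot yet be upgraded, because the strong count stays zero until the value is written. A minimal usage sketch on stable Rust:

    use std::rc::{Rc, Weak};

    struct Node {
        // Weak back-reference to the node itself; a strong one would leak.
        me: Weak<Node>,
    }

    fn main() {
        let node: Rc<Node> = Rc::new_cyclic(|weak| {
            // The strong count is still zero here, so upgrading must fail.
            assert!(weak.upgrade().is_none());
            Node { me: weak.clone() }
        });
        // Once `new_cyclic` returns, the stored weak pointer upgrades normally.
        assert!(node.me.upgrade().is_some());
    }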
@@ -801,8 +766,6 @@ impl<T, A: Allocator> Rc<T, A> {
     where
         F: FnOnce(&Weak<T, A>) -> T,
     {
-        // Note: comments and implementation are copied from Rc::new_cyclic.
-
         // Construct the inner in the "uninitialized" state with a single
         // weak reference.
         let (uninit_raw_ptr, alloc) = Box::into_raw_with_allocator(Box::new_in(
@@ -836,6 +799,8 @@ impl<T, A: Allocator> Rc<T, A> {

         // Strong references should collectively own a shared weak reference,
         // so don't run the destructor for our old weak reference.
+        // Calling into_raw_with_allocator has the double effect of giving us back the allocator,
+        // and forgetting the weak reference.
         let alloc = weak.into_raw_with_allocator().1;

         Rc::from_inner_in(init_ptr, alloc)
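
For context (not part of this commit): the allocator-aware path that `Rc::new_cyclic` now delegates to can also be exercised directly. A sketch, assuming a nightly toolchain with `#![feature(allocator_api)]` and using `System` as the example allocator:

    #![feature(allocator_api)]

    use std::alloc::System;
    use std::rc::{Rc, Weak};

    struct Node {
        me: Weak<Node, System>,
    }

    fn main() {
        // Same contract as `Rc::new_cyclic`, but the allocation is routed
        // through the caller-supplied allocator instead of `Global`.
        let node: Rc<Node, System> =
            Rc::new_cyclic_in(|weak| Node { me: weak.clone() }, System);
        assert!(node.me.upgrade().is_some());
    }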
53 changes: 3 additions & 50 deletions alloc/src/sync.rs
@@ -450,54 +450,7 @@ impl<T> Arc<T> {
     where
         F: FnOnce(&Weak<T>) -> T,
     {
-        // Construct the inner in the "uninitialized" state with a single
-        // weak reference.
-        let uninit_ptr: NonNull<_> = Box::leak(Box::new(ArcInner {
-            strong: atomic::AtomicUsize::new(0),
-            weak: atomic::AtomicUsize::new(1),
-            data: mem::MaybeUninit::<T>::uninit(),
-        }))
-        .into();
-        let init_ptr: NonNull<ArcInner<T>> = uninit_ptr.cast();
-
-        let weak = Weak { ptr: init_ptr, alloc: Global };
-
-        // It's important we don't give up ownership of the weak pointer, or
-        // else the memory might be freed by the time `data_fn` returns. If
-        // we really wanted to pass ownership, we could create an additional
-        // weak pointer for ourselves, but this would result in additional
-        // updates to the weak reference count which might not be necessary
-        // otherwise.
-        let data = data_fn(&weak);
-
-        // Now we can properly initialize the inner value and turn our weak
-        // reference into a strong reference.
-        let strong = unsafe {
-            let inner = init_ptr.as_ptr();
-            ptr::write(ptr::addr_of_mut!((*inner).data), data);
-
-            // The above write to the data field must be visible to any threads which
-            // observe a non-zero strong count. Therefore we need at least "Release" ordering
-            // in order to synchronize with the `compare_exchange_weak` in `Weak::upgrade`.
-            //
-            // "Acquire" ordering is not required. When considering the possible behaviours
-            // of `data_fn` we only need to look at what it could do with a reference to a
-            // non-upgradeable `Weak`:
-            // - It can *clone* the `Weak`, increasing the weak reference count.
-            // - It can drop those clones, decreasing the weak reference count (but never to zero).
-            //
-            // These side effects do not impact us in any way, and no other side effects are
-            // possible with safe code alone.
-            let prev_value = (*inner).strong.fetch_add(1, Release);
-            debug_assert_eq!(prev_value, 0, "No prior strong references should exist");
-
-            Arc::from_inner(init_ptr)
-        };
-
-        // Strong references should collectively own a shared weak reference,
-        // so don't run the destructor for our old weak reference.
-        mem::forget(weak);
-        strong
+        Self::new_cyclic_in(data_fn, Global)
     }

     /// Constructs a new `Arc` with uninitialized contents.
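
For context (not part of this commit): the ordering argument in the deleted comments is observable from safe code. Inside `data_fn` the `Weak` can be cloned but never upgraded, and once `new_cyclic` returns, any thread whose upgrade succeeds is guaranteed to see the fully initialized data. A sketch on stable Rust:

    use std::sync::{Arc, Weak};
    use std::thread;

    struct Shared {
        me: Weak<Shared>,
        value: u64,
    }

    fn main() {
        let shared = Arc::new_cyclic(|weak| {
            // Clones made here only bump the weak count; `upgrade` fails
            // because the strong count is still zero.
            let me = weak.clone();
            assert!(me.upgrade().is_none());
            Shared { me, value: 42 }
        });

        // The `Release` store of the strong count synchronizes with the
        // compare-exchange in `Weak::upgrade`, so a thread that upgrades
        // successfully also sees `value` initialized.
        let weak = shared.me.clone();
        let handle = thread::spawn(move || weak.upgrade().map(|arc| arc.value));
        assert_eq!(handle.join().unwrap(), Some(42));
    }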
@@ -821,8 +774,6 @@ impl<T, A: Allocator> Arc<T, A> {
     where
         F: FnOnce(&Weak<T, A>) -> T,
    {
-        // Note: these comments and much of the implementation is copied from Arc::new_cyclic.
-
         // Construct the inner in the "uninitialized" state with a single
         // weak reference.
         let (uninit_raw_ptr, alloc) = Box::into_raw_with_allocator(Box::new_in(
@@ -869,6 +820,8 @@ impl<T, A: Allocator> Arc<T, A> {

         // Strong references should collectively own a shared weak reference,
         // so don't run the destructor for our old weak reference.
+        // Calling into_raw_with_allocator has the double effect of giving us back the allocator,
+        // and forgetting the weak reference.
         let alloc = weak.into_raw_with_allocator().1;

         Arc::from_inner_in(init_ptr, alloc)
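
For context (not part of this commit): the shape of this refactor is the common `new`/`new_in` delegation pattern, where the allocator-generic function is the single source of truth and the convenience constructor just supplies the default allocator. A minimal sketch with hypothetical types:

    // `Pool` and `DefaultAlloc` are hypothetical stand-ins, not std types.
    struct DefaultAlloc;

    struct Pool<A = DefaultAlloc> {
        #[allow(dead_code)]
        alloc: A,
    }

    impl Pool<DefaultAlloc> {
        // Thin convenience wrapper, mirroring `Rc::new_cyclic` after this commit.
        fn new() -> Self {
            Self::new_in(DefaultAlloc)
        }
    }

    impl<A> Pool<A> {
        // The allocator-generic constructor is the single source of truth,
        // mirroring `Rc::new_cyclic_in`.
        fn new_in(alloc: A) -> Self {
            Pool { alloc }
        }
    }

    fn main() {
        let _with_default = Pool::new();
        let _with_custom = Pool::new_in(DefaultAlloc);
    }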
