
Commit 7b61eea

Author: Matthew Giordano
Committed: Sep 6, 2024
remove the Clone requirement
1 parent 24eca28 commit 7b61eea

File tree: 2 files changed, +170 -173 lines

 

alloc/src/rc.rs (+78 -80)
@@ -766,6 +766,84 @@ impl<T, A: Allocator> Rc<T, A> {
         }
     }
 
+    /// Constructs a new `Rc<T, A>` in the given allocator while giving you a `Weak<T, A>` to the allocation,
+    /// to allow you to construct a `T` which holds a weak pointer to itself.
+    ///
+    /// Generally, a structure circularly referencing itself, either directly or
+    /// indirectly, should not hold a strong reference to itself to prevent a memory leak.
+    /// Using this function, you get access to the weak pointer during the
+    /// initialization of `T`, before the `Rc<T, A>` is created, such that you can
+    /// clone and store it inside the `T`.
+    ///
+    /// `new_cyclic_in` first allocates the managed allocation for the `Rc<T, A>`,
+    /// then calls your closure, giving it a `Weak<T, A>` to this allocation,
+    /// and only afterwards completes the construction of the `Rc<T, A>` by placing
+    /// the `T` returned from your closure into the allocation.
+    ///
+    /// Since the new `Rc<T, A>` is not fully-constructed until `Rc<T, A>::new_cyclic_in`
+    /// returns, calling [`upgrade`] on the weak reference inside your closure will
+    /// fail and result in a `None` value.
+    ///
+    /// # Panics
+    ///
+    /// If `data_fn` panics, the panic is propagated to the caller, and the
+    /// temporary [`Weak<T, A>`] is dropped normally.
+    ///
+    /// # Examples
+    ///
+    /// See [`new_cyclic`].
+    ///
+    /// [`new_cyclic`]: Rc::new_cyclic
+    /// [`upgrade`]: Weak::upgrade
+    #[cfg(not(no_global_oom_handling))]
+    #[unstable(feature = "allocator_api", issue = "32838")]
+    pub fn new_cyclic_in<F>(data_fn: F, alloc: A) -> Rc<T, A>
+    where
+        F: FnOnce(&Weak<T, A>) -> T,
+    {
+        // Note: comments and implementation are copied from Rc::new_cyclic.
+
+        // Construct the inner in the "uninitialized" state with a single
+        // weak reference.
+        let (uninit_raw_ptr, alloc) = Box::into_raw_with_allocator(Box::new_in(
+            RcBox {
+                strong: Cell::new(0),
+                weak: Cell::new(1),
+                value: mem::MaybeUninit::<T>::uninit(),
+            },
+            alloc,
+        ));
+        let uninit_ptr: NonNull<_> = (unsafe { &mut *uninit_raw_ptr }).into();
+        let init_ptr: NonNull<RcBox<T>> = uninit_ptr.cast();
+
+        let weak = Weak { ptr: init_ptr, alloc: alloc };
+
+        // It's important we don't give up ownership of the weak pointer, or
+        // else the memory might be freed by the time `data_fn` returns. If
+        // we really wanted to pass ownership, we could create an additional
+        // weak pointer for ourselves, but this would result in additional
+        // updates to the weak reference count which might not be necessary
+        // otherwise.
+        let data = data_fn(&weak);
+
+        let strong = unsafe {
+            let inner = init_ptr.as_ptr();
+            ptr::write(ptr::addr_of_mut!((*inner).value), data);
+
+            let prev_value = (*inner).strong.get();
+            debug_assert_eq!(prev_value, 0, "No prior strong references should exist");
+            (*inner).strong.set(1);
+
+            // Strong references should collectively own a shared weak reference,
+            // so don't run the destructor for our old weak reference.
+            let alloc = weak.into_raw_with_allocator().1;
+
+            Rc::from_inner_in(init_ptr, alloc)
+        };
+
+        strong
+    }
+
     /// Constructs a new `Rc<T>` in the provided allocator, returning an error if the allocation
     /// fails
     ///
@@ -2264,86 +2342,6 @@ impl<T: ?Sized, A: Allocator + Clone> Clone for Rc<T, A> {
     }
 }
 
-impl<T, A: Allocator + Clone> Rc<T, A> {
-    /// Constructs a new `Rc<T, A>` in the given allocator while giving you a `Weak<T, A>` to the allocation,
-    /// to allow you to construct a `T` which holds a weak pointer to itself.
-    ///
-    /// Generally, a structure circularly referencing itself, either directly or
-    /// indirectly, should not hold a strong reference to itself to prevent a memory leak.
-    /// Using this function, you get access to the weak pointer during the
-    /// initialization of `T`, before the `Rc<T, A>` is created, such that you can
-    /// clone and store it inside the `T`.
-    ///
-    /// `new_cyclic` first allocates the managed allocation for the `Rc<T, A>`,
-    /// then calls your closure, giving it a `Weak<T, A>` to this allocation,
-    /// and only afterwards completes the construction of the `Rc<T, A>` by placing
-    /// the `T` returned from your closure into the allocation.
-    ///
-    /// Since the new `Rc<T, A>` is not fully-constructed until `Rc<T, A>::new_cyclic_in`
-    /// returns, calling [`upgrade`] on the weak reference inside your closure will
-    /// fail and result in a `None` value.
-    ///
-    /// # Panics
-    ///
-    /// If `data_fn` panics, the panic is propagated to the caller, and the
-    /// temporary [`Weak<T, A>`] is dropped normally.
-    ///
-    /// # Examples
-    ///
-    /// See [`new_cyclic`].
-    ///
-    /// [`new_cyclic`]: Rc::new_cyclic
-    /// [`upgrade`]: Weak::upgrade
-    #[cfg(not(no_global_oom_handling))]
-    #[unstable(feature = "allocator_api", issue = "32838")]
-    pub fn new_cyclic_in<F>(data_fn: F, alloc: A) -> Rc<T, A>
-    where
-        F: FnOnce(&Weak<T, A>) -> T,
-    {
-        // Note: comments and implementation are copied from Rc::new_cyclic.
-
-        // Construct the inner in the "uninitialized" state with a single
-        // weak reference.
-        let uninit_ptr: NonNull<_> = Box::leak(Box::new_in(
-            RcBox {
-                strong: Cell::new(0),
-                weak: Cell::new(1),
-                value: mem::MaybeUninit::<T>::uninit(),
-            },
-            alloc.clone(),
-        ))
-        .into();
-
-        let init_ptr: NonNull<RcBox<T>> = uninit_ptr.cast();
-
-        let weak = Weak { ptr: init_ptr, alloc: alloc.clone() };
-
-        // It's important we don't give up ownership of the weak pointer, or
-        // else the memory might be freed by the time `data_fn` returns. If
-        // we really wanted to pass ownership, we could create an additional
-        // weak pointer for ourselves, but this would result in additional
-        // updates to the weak reference count which might not be necessary
-        // otherwise.
-        let data = data_fn(&weak);
-
-        let strong = unsafe {
-            let inner = init_ptr.as_ptr();
-            ptr::write(ptr::addr_of_mut!((*inner).value), data);
-
-            let prev_value = (*inner).strong.get();
-            debug_assert_eq!(prev_value, 0, "No prior strong references should exist");
-            (*inner).strong.set(1);
-
-            Rc::from_inner_in(init_ptr, alloc)
-        };
-
-        // Strong references should collectively own a shared weak reference,
-        // so don't run the destructor for our old weak reference.
-        mem::forget(weak);
-        strong
-    }
-}
-
 #[cfg(not(no_global_oom_handling))]
 #[stable(feature = "rust1", since = "1.0.0")]
 impl<T: Default> Default for Rc<T> {
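
Note: the doc comment above defers to `Rc::new_cyclic` for examples. As a point of reference, here is a minimal usage sketch of the `new_cyclic_in` signature added by this commit, modeled on the `new_cyclic` documentation; the `Gadget` type is illustrative only, and the snippet assumes a nightly toolchain with `#![feature(allocator_api)]`, using `std::alloc::System` as the allocator.

#![feature(allocator_api)]

use std::alloc::System;
use std::rc::{Rc, Weak};

// Illustrative self-referential type, following the `Rc::new_cyclic` docs.
struct Gadget {
    me: Weak<Gadget, System>,
}

fn main() {
    // The closure runs before the `Rc` exists; upgrading `me` inside it would yield `None`.
    let gadget: Rc<Gadget, System> =
        Rc::new_cyclic_in(|me| Gadget { me: me.clone() }, System);

    // Once `new_cyclic_in` has returned, the stored weak pointer upgrades normally.
    assert!(gadget.me.upgrade().is_some());
}

`System` happens to be `Clone`; the change in this commit is only that `new_cyclic_in` itself no longer requires `A: Clone`, since it now threads the allocator through `Box::into_raw_with_allocator` and `Weak::into_raw_with_allocator` instead of cloning it.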

alloc/src/sync.rs (+92 -93)
@@ -785,6 +785,98 @@ impl<T, A: Allocator> Arc<T, A> {
         }
     }
 
+    /// Constructs a new `Arc<T, A>` in the given allocator while giving you a `Weak<T, A>` to the allocation,
+    /// to allow you to construct a `T` which holds a weak pointer to itself.
+    ///
+    /// Generally, a structure circularly referencing itself, either directly or
+    /// indirectly, should not hold a strong reference to itself to prevent a memory leak.
+    /// Using this function, you get access to the weak pointer during the
+    /// initialization of `T`, before the `Arc<T, A>` is created, such that you can
+    /// clone and store it inside the `T`.
+    ///
+    /// `new_cyclic_in` first allocates the managed allocation for the `Arc<T, A>`,
+    /// then calls your closure, giving it a `Weak<T, A>` to this allocation,
+    /// and only afterwards completes the construction of the `Arc<T, A>` by placing
+    /// the `T` returned from your closure into the allocation.
+    ///
+    /// Since the new `Arc<T, A>` is not fully-constructed until `Arc<T, A>::new_cyclic_in`
+    /// returns, calling [`upgrade`] on the weak reference inside your closure will
+    /// fail and result in a `None` value.
+    ///
+    /// # Panics
+    ///
+    /// If `data_fn` panics, the panic is propagated to the caller, and the
+    /// temporary [`Weak<T>`] is dropped normally.
+    ///
+    /// # Example
+    ///
+    /// See [`new_cyclic`]
+    ///
+    /// [`new_cyclic`]: Arc::new_cyclic
+    /// [`upgrade`]: Weak::upgrade
+    #[cfg(not(no_global_oom_handling))]
+    #[inline]
+    #[stable(feature = "arc_new_cyclic", since = "1.60.0")]
+    pub fn new_cyclic_in<F>(data_fn: F, alloc: A) -> Arc<T, A>
+    where
+        F: FnOnce(&Weak<T, A>) -> T,
+    {
+        // Note: these comments and much of the implementation is copied from Arc::new_cyclic.
+
+        // Construct the inner in the "uninitialized" state with a single
+        // weak reference.
+        let (uninit_raw_ptr, alloc) = Box::into_raw_with_allocator(Box::new_in(
+            ArcInner {
+                strong: atomic::AtomicUsize::new(0),
+                weak: atomic::AtomicUsize::new(1),
+                data: mem::MaybeUninit::<T>::uninit(),
+            },
+            alloc,
+        ));
+        let uninit_ptr: NonNull<_> = (unsafe { &mut *uninit_raw_ptr }).into();
+        let init_ptr: NonNull<ArcInner<T>> = uninit_ptr.cast();
+
+        let weak = Weak { ptr: init_ptr, alloc: alloc };
+
+        // It's important we don't give up ownership of the weak pointer, or
+        // else the memory might be freed by the time `data_fn` returns. If
+        // we really wanted to pass ownership, we could create an additional
+        // weak pointer for ourselves, but this would result in additional
+        // updates to the weak reference count which might not be necessary
+        // otherwise.
+        let data = data_fn(&weak);
+
+        // Now we can properly initialize the inner value and turn our weak
+        // reference into a strong reference.
+        let strong = unsafe {
+            let inner = init_ptr.as_ptr();
+            ptr::write(ptr::addr_of_mut!((*inner).data), data);
+
+            // The above write to the data field must be visible to any threads which
+            // observe a non-zero strong count. Therefore we need at least "Release" ordering
+            // in order to synchronize with the `compare_exchange_weak` in `Weak::upgrade`.
+            //
+            // "Acquire" ordering is not required. When considering the possible behaviours
+            // of `data_fn` we only need to look at what it could do with a reference to a
+            // non-upgradeable `Weak`:
+            // - It can *clone* the `Weak`, increasing the weak reference count.
+            // - It can drop those clones, decreasing the weak reference count (but never to zero).
+            //
+            // These side effects do not impact us in any way, and no other side effects are
+            // possible with safe code alone.
+            let prev_value = (*inner).strong.fetch_add(1, Release);
+            debug_assert_eq!(prev_value, 0, "No prior strong references should exist");
+
+            // Strong references should collectively own a shared weak reference,
+            // so don't run the destructor for our old weak reference.
+            let alloc = weak.into_raw_with_allocator().1;
+
+            Arc::from_inner_in(init_ptr, alloc)
+        };
+
+        strong
+    }
+
     /// Constructs a new `Pin<Arc<T, A>>` in the provided allocator. If `T` does not implement `Unpin`,
     /// then `data` will be pinned in memory and unable to be moved.
     #[cfg(not(no_global_oom_handling))]
@@ -1322,99 +1414,6 @@ impl<T, A: Allocator> Arc<[mem::MaybeUninit<T>], A> {
     }
 }
 
-impl<T, A: Allocator + Clone> Arc<T, A> {
-    /// Constructs a new `Arc<T, A>` in the given allocator while giving you a `Weak<T, A>` to the allocation,
-    /// to allow you to construct a `T` which holds a weak pointer to itself.
-    ///
-    /// Generally, a structure circularly referencing itself, either directly or
-    /// indirectly, should not hold a strong reference to itself to prevent a memory leak.
-    /// Using this function, you get access to the weak pointer during the
-    /// initialization of `T`, before the `Arc<T, A>` is created, such that you can
-    /// clone and store it inside the `T`.
-    ///
-    /// `new_cyclic` first allocates the managed allocation for the `Arc<T, A>`,
-    /// then calls your closure, giving it a `Weak<T, A>` to this allocation,
-    /// and only afterwards completes the construction of the `Arc<T, A>` by placing
-    /// the `T` returned from your closure into the allocation.
-    ///
-    /// Since the new `Arc<T, A>` is not fully-constructed until `Arc<T, A>::new_cyclic_in`
-    /// returns, calling [`upgrade`] on the weak reference inside your closure will
-    /// fail and result in a `None` value.
-    ///
-    /// # Panics
-    ///
-    /// If `data_fn` panics, the panic is propagated to the caller, and the
-    /// temporary [`Weak<T>`] is dropped normally.
-    ///
-    /// # Example
-    ///
-    /// See [`new_cyclic`]
-    ///
-    /// [`new_cyclic`]: Arc::new_cyclic
-    /// [`upgrade`]: Weak::upgrade
-    #[cfg(not(no_global_oom_handling))]
-    #[inline]
-    #[stable(feature = "arc_new_cyclic", since = "1.60.0")]
-    pub fn new_cyclic_in<F>(data_fn: F, alloc: A) -> Arc<T, A>
-    where
-        F: FnOnce(&Weak<T, A>) -> T,
-    {
-        // Note: these comments and much of the implementation is copied from Arc::new_cyclic.
-
-        // Construct the inner in the "uninitialized" state with a single
-        // weak reference.
-        let uninit_ptr: NonNull<_> = Box::leak(Box::new_in(
-            ArcInner {
-                strong: atomic::AtomicUsize::new(0),
-                weak: atomic::AtomicUsize::new(1),
-                data: mem::MaybeUninit::<T>::uninit(),
-            },
-            alloc.clone(),
-        ))
-        .into();
-        let init_ptr: NonNull<ArcInner<T>> = uninit_ptr.cast();
-
-        let weak = Weak { ptr: init_ptr, alloc: alloc.clone() };
-
-        // It's important we don't give up ownership of the weak pointer, or
-        // else the memory might be freed by the time `data_fn` returns. If
-        // we really wanted to pass ownership, we could create an additional
-        // weak pointer for ourselves, but this would result in additional
-        // updates to the weak reference count which might not be necessary
-        // otherwise.
-        let data = data_fn(&weak);
-
-        // Now we can properly initialize the inner value and turn our weak
-        // reference into a strong reference.
-        let strong = unsafe {
-            let inner = init_ptr.as_ptr();
-            ptr::write(ptr::addr_of_mut!((*inner).data), data);
-
-            // The above write to the data field must be visible to any threads which
-            // observe a non-zero strong count. Therefore we need at least "Release" ordering
-            // in order to synchronize with the `compare_exchange_weak` in `Weak::upgrade`.
-            //
-            // "Acquire" ordering is not required. When considering the possible behaviours
-            // of `data_fn` we only need to look at what it could do with a reference to a
-            // non-upgradeable `Weak`:
-            // - It can *clone* the `Weak`, increasing the weak reference count.
-            // - It can drop those clones, decreasing the weak reference count (but never to zero).
-            //
-            // These side effects do not impact us in any way, and no other side effects are
-            // possible with safe code alone.
-            let prev_value = (*inner).strong.fetch_add(1, Release);
-            debug_assert_eq!(prev_value, 0, "No prior strong references should exist");
-
-            Arc::from_inner_in(init_ptr, alloc)
-        };
-
-        // Strong references should collectively own a shared weak reference,
-        // so don't run the destructor for our old weak reference.
-        mem::forget(weak);
-        strong
-    }
-}
-
 impl<T: ?Sized> Arc<T> {
     /// Constructs an `Arc<T>` from a raw pointer.
     ///
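
A matching sketch for the `Arc` version added above, again illustrative only and assuming a nightly toolchain with `#![feature(allocator_api)]` and `std::alloc::System` as the allocator. The `Release`-ordering bookkeeping discussed in the diff's comments happens inside `new_cyclic_in`; the caller only supplies the closure and the allocator.

#![feature(allocator_api)]

use std::alloc::System;
use std::sync::{Arc, Weak};

// Illustrative self-referential type, mirroring the `Rc` sketch earlier.
struct Gadget {
    me: Weak<Gadget, System>,
}

fn main() {
    // As with `Rc`, the weak handle passed to the closure cannot be upgraded yet.
    let gadget: Arc<Gadget, System> =
        Arc::new_cyclic_in(|me| Gadget { me: me.clone() }, System);

    assert!(gadget.me.upgrade().is_some());
}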
