Skip to content

Commit 393e377

Browse files
committed
Add RawRc type
1 parent 99522fc commit 393e377

File tree

3 files changed

+377
-0
lines changed

3 files changed

+377
-0
lines changed

library/alloc/src/raw_rc/mod.rs

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -74,6 +74,7 @@ use crate::alloc;
7474
use crate::raw_rc::rc_layout::RcLayout;
7575
use crate::raw_rc::rc_value_pointer::RcValuePointer;
7676

77+
mod raw_rc;
7778
mod raw_weak;
7879
mod rc_layout;
7980
mod rc_value_pointer;

library/alloc/src/raw_rc/raw_rc.rs

Lines changed: 349 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,349 @@
1+
use core::alloc::Allocator;
2+
use core::cell::UnsafeCell;
3+
#[cfg(not(no_global_oom_handling))]
4+
use core::clone::CloneToUninit;
5+
#[cfg(not(no_global_oom_handling))]
6+
use core::marker::PhantomData;
7+
#[cfg(not(no_global_oom_handling))]
8+
use core::mem;
9+
use core::ptr::NonNull;
10+
11+
use crate::raw_rc::RefCounter;
12+
use crate::raw_rc::raw_weak::RawWeak;
13+
#[cfg(not(no_global_oom_handling))]
14+
use crate::raw_rc::rc_layout::RcLayout;
15+
use crate::raw_rc::rc_value_pointer::RcValuePointer;
16+
17+
/// Decrements strong reference count in a reference-counted allocation with a value object that is
18+
/// pointed to by `value_ptr`.
19+
#[inline]
20+
unsafe fn decrement_strong_ref_count<R>(value_ptr: RcValuePointer) -> bool
21+
where
22+
R: RefCounter,
23+
{
24+
unsafe { R::from_raw_counter(value_ptr.strong_count_ptr().as_ref()).decrement() }
25+
}
26+
27+
/// Increments strong reference count in a reference-counted allocation with a value object that is
28+
/// pointed to by `value_ptr`.
29+
#[inline]
30+
unsafe fn increment_strong_ref_count<R>(value_ptr: RcValuePointer)
31+
where
32+
R: RefCounter,
33+
{
34+
unsafe { R::from_raw_counter(value_ptr.strong_count_ptr().as_ref()).increment() };
35+
}
36+
37+
#[inline]
38+
unsafe fn is_unique<R>(value_ptr: RcValuePointer) -> bool
39+
where
40+
R: RefCounter,
41+
{
42+
let ref_counts = unsafe { value_ptr.ref_counts_ptr().as_ref() };
43+
44+
unsafe {
45+
R::is_unique(R::from_raw_counter(&ref_counts.strong), R::from_raw_counter(&ref_counts.weak))
46+
}
47+
}
48+
49+
/// Base implementation of a strong pointer. `RawRc` does not implement `Drop`, user should call
/// `RawRc::drop` manually to drop this object.
///
/// `#[repr(transparent)]` over the inner `RawWeak` means a `RawRc` has the same layout as its
/// weak counterpart (the `PhantomData` field is zero-sized).
#[repr(transparent)]
pub(crate) struct RawRc<T, A>
where
    T: ?Sized,
{
    /// A `RawRc` is just a non-dangling `RawWeak` that has a strong reference count that is owned
    /// by the `RawRc` object. The weak pointer is always non-dangling.
    weak: RawWeak<T, A>,

    // Defines the ownership of `T` for drop-check.
    // NOTE(review): the `use core::marker::PhantomData;` at the top of this file appears to be
    // gated on `#[cfg(not(no_global_oom_handling))]`, yet this field uses `PhantomData`
    // unconditionally — confirm this builds with `no_global_oom_handling` enabled.
    _phantom_data: PhantomData<T>,
}
63+
64+
impl<T, A> RawRc<T, A>
where
    T: ?Sized,
{
    /// Reconstructs a `RawRc` from a raw value pointer, using `A::default()` as the allocator.
    ///
    /// # Safety
    ///
    /// - `ptr` points to a value inside a reference-counted allocation.
    /// - The allocation can be freed by `A::default()`.
    pub(crate) unsafe fn from_raw(ptr: NonNull<T>) -> Self
    where
        A: Default,
    {
        unsafe { Self::from_raw_parts(ptr, A::default()) }
    }

    /// Reconstructs a `RawRc` from a raw value pointer and the allocator that owns the allocation.
    ///
    /// # Safety
    ///
    /// - `ptr` points to a value inside a reference-counted allocation.
    /// - The allocation can be freed by `alloc`.
    pub(crate) unsafe fn from_raw_parts(ptr: NonNull<T>, alloc: A) -> Self {
        unsafe { Self::from_weak(RawWeak::from_raw_parts(ptr, alloc)) }
    }

    /// Wraps a `RawWeak` into a `RawRc` without touching any reference count.
    ///
    /// # Safety
    ///
    /// `weak` must have at least one unowned strong reference count. The newly created `RawRc` will
    /// take the ownership of exactly one strong reference count.
    pub(super) unsafe fn from_weak(weak: RawWeak<T, A>) -> Self {
        Self { weak, _phantom_data: PhantomData }
    }

    /// Returns a reference to the allocator.
    // NOTE(review): the leading `&` looks redundant — if `RawWeak::allocator` already returns
    // `&A`, this produces `&&A` and only compiles through deref coercion. Confirm
    // `RawWeak::allocator`'s return type; if it is `&A`, drop the `&`.
    pub(crate) fn allocator(&self) -> &A {
        &self.weak.allocator()
    }

    /// Returns the pointer to the contained value.
    pub(crate) fn as_ptr(&self) -> NonNull<T> {
        self.weak.as_ptr()
    }

    /// Converts to a `RawRc<U, A>` referring to the same allocation, delegating the pointer cast
    /// to `RawWeak::cast`. The owned strong reference is carried over unchanged.
    pub(crate) unsafe fn cast<U>(self) -> RawRc<U, A> {
        unsafe { RawRc::from_weak(self.weak.cast()) }
    }

    /// Maps the value pointer through `f` to produce a `RawRc` of a (possibly unsized) pointee
    /// type `U` that shares this allocation. The owned strong reference is carried over unchanged.
    #[inline]
    pub(crate) unsafe fn cast_with<U, F>(self, f: F) -> RawRc<U, A>
    where
        U: ?Sized,
        F: FnOnce(NonNull<T>) -> NonNull<U>,
    {
        unsafe { RawRc::from_weak(self.weak.cast_with(f)) }
    }

    /// Creates another strong reference to the same allocation: increments the strong count, then
    /// builds a new `RawRc` from the same value pointer and a clone of the allocator.
    #[inline]
    pub(crate) unsafe fn clone<R>(&self) -> Self
    where
        A: Clone,
        R: RefCounter,
    {
        unsafe {
            increment_strong_ref_count::<R>(self.value_ptr());

            Self::from_raw_parts(self.weak.as_ptr(), self.allocator().clone())
        }
    }

    /// Releases one strong reference on the allocation `ptr` points into, using `A::default()`
    /// as the allocator for any resulting deallocation.
    pub(crate) unsafe fn decrement_strong_count<R: RefCounter>(ptr: NonNull<T>)
    where
        A: Allocator + Default,
    {
        unsafe { Self::decrement_strong_count_in::<R>(ptr, A::default()) };
    }

    /// Releases one strong reference on the allocation `ptr` points into, freeing with `alloc` if
    /// needed. Implemented by materializing a temporary `RawRc` and dropping it.
    pub(crate) unsafe fn decrement_strong_count_in<R: RefCounter>(ptr: NonNull<T>, alloc: A)
    where
        A: Allocator,
    {
        unsafe { RawRc::from_raw_parts(ptr, alloc).drop::<R>() };
    }

    /// Increments the strong count of the allocation `ptr` points into.
    pub(crate) unsafe fn increment_strong_count<R: RefCounter>(ptr: NonNull<T>) {
        unsafe { increment_strong_ref_count::<R>(RcValuePointer::new(ptr.cast())) };
    }

    /// Creates a `RawWeak` to the same allocation: bumps the weak count, then builds the weak
    /// pointer from the same value pointer and a clone of the allocator.
    pub(crate) unsafe fn downgrade<R>(&self) -> RawWeak<T, A>
    where
        A: Clone,
        R: RefCounter,
    {
        // `T`-independent helper — presumably factored out so the count manipulation is not
        // monomorphized per `T` (TODO confirm intent).
        unsafe fn inner<R>(value_ptr: RcValuePointer)
        where
            R: RefCounter,
        {
            unsafe { R::from_raw_counter(value_ptr.weak_count_ptr().as_ref()).downgrade() };
        }

        unsafe {
            inner::<R>(self.value_ptr());

            RawWeak::from_raw_parts(self.weak.as_ptr(), self.allocator().clone())
        }
    }

    /// Releases the strong reference owned by this `RawRc`. If it was the last one, delegates the
    /// remaining cleanup (value drop / weak bookkeeping / deallocation) to
    /// `RawWeak::assume_init_drop`.
    #[inline]
    pub(crate) unsafe fn drop<R>(&mut self)
    where
        A: Allocator,
        R: RefCounter,
    {
        let is_last_strong_ref = unsafe { decrement_strong_ref_count::<R>(self.value_ptr()) };

        if is_last_strong_ref {
            unsafe { self.weak.assume_init_drop::<R>() }
        }
    }

    /// Returns a mutable reference to the value if the reference counts report unique ownership
    /// (see `RefCounter::is_unique`), otherwise `None`.
    pub(crate) unsafe fn get_mut<R>(&mut self) -> Option<&mut T>
    where
        R: RefCounter,
    {
        // `T`-independent helper performing the uniqueness check on the type-erased pointer.
        unsafe fn inner<R>(value_ptr: RcValuePointer) -> Option<RcValuePointer>
        where
            R: RefCounter,
        {
            unsafe { is_unique::<R>(value_ptr) }.then_some(value_ptr)
        }

        // Split the (possibly fat) pointer into address and metadata; the check only needs the
        // address, the metadata is re-attached afterwards to rebuild the `&mut T`.
        let (ptr, metadata) = self.weak.as_ptr().to_raw_parts();

        unsafe { inner::<R>(RcValuePointer::new(ptr)) }
            .map(|ptr| unsafe { NonNull::from_raw_parts(ptr.as_ptr(), metadata).as_mut() })
    }

    /// Returns a mutable reference to the contained value.
    ///
    /// # Safety
    ///
    /// No other active references to the contained value should exist, and no new references to the
    /// contained value will be acquired for the duration of the returned borrow.
    pub(crate) unsafe fn get_mut_unchecked(&mut self) -> &mut T {
        // SAFETY: The caller guarantees that we can access the contained value exclusively. Note
        // that we can't create mutable references that have access to reference counters, because
        // the caller only guarantee exclusive access to the contained value, not the reference
        // counters.
        unsafe { self.weak.as_ptr().as_mut() }
    }

    /// Consumes `self` and returns the raw value pointer (the allocator is dropped or leaked per
    /// `RawWeak::into_raw`'s contract — confirm there).
    pub(crate) fn into_raw(self) -> NonNull<T> {
        self.weak.into_raw()
    }

    /// Consumes `self` and returns the raw value pointer together with the allocator.
    pub(crate) fn into_raw_parts(self) -> (NonNull<T>, A) {
        self.weak.into_raw_parts()
    }

    /// Ensures unique ownership of the value, moving or cloning it into a fresh allocation if
    /// necessary, and returns a mutable reference to it (analogous to `Arc::make_mut`).
    #[cfg(not(no_global_oom_handling))]
    pub(crate) unsafe fn make_mut<R>(&mut self) -> &mut T
    where
        T: CloneToUninit,
        A: Allocator + Clone,
        R: RefCounter,
    {
        use core::ptr;

        use crate::raw_rc::MakeMutStrategy;
        use crate::raw_rc::raw_weak::WeakGuard;

        // Drop guard: on drop, repoints `rc` at `new_ptr`. Constructed *before* the old
        // allocation is torn down so the `RawRc` ends up pointing at the new value even if the
        // teardown panics.
        struct SetRcPtrOnDrop<'a, T, A>
        where
            T: ?Sized,
        {
            rc: &'a mut RawRc<T, A>,
            new_ptr: NonNull<T>,
        }

        impl<T, A> Drop for SetRcPtrOnDrop<'_, T, A>
        where
            T: ?Sized,
        {
            fn drop(&mut self) {
                unsafe { self.rc.weak.set_ptr(self.new_ptr) };
            }
        }

        unsafe {
            let ref_counts = self.ref_counts();

            // `R::make_mut` decides whether any work is needed; `None` means we already own the
            // value exclusively.
            if let Some(strategy) = R::make_mut(
                R::from_raw_counter(&ref_counts.strong),
                R::from_raw_counter(&ref_counts.weak),
            ) {
                let rc_layout = RcLayout::from_value_ptr_unchecked(self.weak.as_ptr());

                match strategy {
                    MakeMutStrategy::Move => {
                        // `R::make_mut` has made strong reference count to zero, so the `RawRc`
                        // object is essentially a `RawWeak` object but has its value initialized.
                        // This means we are the only owner of the value and we can safely move the
                        // value into a new allocation.

                        // This guarantees to drop old `RawRc` object even if the allocation
                        // panics.
                        let guard = WeakGuard::<T, A, R>::new(&mut self.weak);

                        // Bitwise-copy the value into a freshly allocated rc block.
                        let new_ptr = super::allocate_with_bytes_in::<A, 1>(
                            guard.as_ptr().cast(),
                            &guard.allocator(),
                            rc_layout,
                        );

                        // No panic happens, defuse the guard.
                        mem::forget(guard);

                        // Re-attach the original pointer metadata to the new (thin) allocation
                        // pointer.
                        let new_ptr = NonNull::from_raw_parts(
                            new_ptr.as_ptr(),
                            ptr::metadata(self.weak.as_ptr().as_ptr()),
                        );

                        // Ensure the value pointer in `self` is updated to `new_ptr`.
                        let update_ptr_on_drop = SetRcPtrOnDrop { rc: self, new_ptr };

                        // `MakeMutStrategy::Move` guarantees that the strong count is zero, also we
                        // have copied the value to a new allocation, so we can pretend the original
                        // `RawRc` is now essentially an `RawWeak` object, we can call the `RawWeak`
                        // destructor to finish the cleanup.
                        update_ptr_on_drop.rc.weak.drop_unchecked::<R>();
                    }
                    MakeMutStrategy::Clone => {
                        // There are multiple owners of the value, we need to clone the value into a
                        // new allocation.

                        let new_ptr = super::allocate_with_in::<A, _, 1>(
                            &self.allocator(),
                            rc_layout,
                            |dst_ptr| {
                                T::clone_to_uninit(
                                    self.as_ptr().as_ref(),
                                    dst_ptr.as_ptr().as_ptr().cast(),
                                )
                            },
                        );

                        // Re-attach the original pointer metadata to the new (thin) allocation
                        // pointer.
                        let new_ptr = NonNull::from_raw_parts(
                            new_ptr.as_ptr(),
                            ptr::metadata(self.weak.as_ptr().as_ptr()),
                        );

                        // Ensure the value pointer in `self` is updated to `new_ptr`.
                        let update_ptr_on_drop = SetRcPtrOnDrop { rc: self, new_ptr };

                        // Manually drop old `RawRc`.
                        update_ptr_on_drop.rc.drop::<R>();
                    }
                }
            }

            self.get_mut_unchecked()
        }
    }

    /// Returns `true` if both `RawRc`s point to the same allocation (pointer identity, not value
    /// equality).
    pub(crate) fn ptr_eq(&self, other: &Self) -> bool {
        RawWeak::ptr_eq(&self.weak, &other.weak)
    }

    /// Returns `true` if the two `RawRc`s point to different allocations.
    pub(crate) fn ptr_ne(&self, other: &Self) -> bool {
        RawWeak::ptr_ne(&self.weak, &other.weak)
    }

    /// Returns a reference to both reference counters of the allocation.
    #[cfg(not(no_global_oom_handling))]
    pub(crate) fn ref_counts(&self) -> &crate::raw_rc::RefCounts {
        // SAFETY (implicit): `self.weak` is always non-dangling for a `RawRc`.
        unsafe { self.weak.ref_counts_unchecked() }
    }

    /// Returns a reference to the raw strong counter cell.
    pub(crate) fn strong_count(&self) -> &UnsafeCell<usize> {
        unsafe { self.weak.strong_count_unchecked() }
    }

    /// Returns a reference to the raw weak counter cell.
    pub(crate) fn weak_count(&self) -> &UnsafeCell<usize> {
        unsafe { self.weak.weak_count_unchecked() }
    }

    /// Returns the type-erased value pointer used by the counter helpers.
    #[inline]
    fn value_ptr(&self) -> RcValuePointer {
        // SAFETY: `self.weak` is guaranteed to be non-dangling.
        unsafe { self.weak.value_ptr_unchecked() }
    }
}

library/alloc/src/raw_rc/raw_weak.rs

Lines changed: 27 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -8,6 +8,7 @@ use core::ops::{CoerceUnsized, Deref, DerefMut, DispatchFromDyn};
88
use core::ptr::{self, NonNull};
99

1010
use crate::alloc::Global;
11+
use crate::raw_rc::raw_rc::RawRc;
1112
use crate::raw_rc::rc_layout::{RcLayout, RcLayoutExt};
1213
use crate::raw_rc::rc_value_pointer::RcValuePointer;
1314
use crate::raw_rc::{RefCounter, RefCounts};
@@ -344,6 +345,32 @@ where
344345
self.ptr = ptr;
345346
}
346347

348+
/// Creates a `RawRc` object if there are non-zero strong reference counts.
///
/// # Safety
///
/// `self` should only be handled by the same `RefCounter` implementation.
pub(crate) unsafe fn upgrade<R>(&self) -> Option<RawRc<T, A>>
where
    A: Clone,
    R: RefCounter,
{
    // `T`-independent helper deciding whether an upgrade is possible: a dangling weak (one that
    // never referred to an allocation) can never upgrade; otherwise defer to the `RefCounter`'s
    // `upgrade`, which reports whether a strong reference could be acquired.
    unsafe fn inner<R>(value_ptr: NonNull<()>) -> bool
    where
        R: RefCounter,
    {
        (!is_dangling(value_ptr))
            && unsafe {
                R::from_raw_counter(RcValuePointer::new(value_ptr).strong_count_ptr().as_ref())
                    .upgrade()
            }
    }

    unsafe {
        // On success, the freshly acquired strong reference is handed to a new `RawRc` built
        // from this weak's pointer and a clone of its allocator.
        inner::<R>(self.ptr.cast()).then(|| RawRc::from_raw_parts(self.ptr, self.alloc.clone()))
    }
}
373+
347374
/// Returns a pointer to the value location of the reference-counted allocation, assume `self`
348375
/// is non-dangling.
349376
///

0 commit comments

Comments
 (0)