|
| 1 | +use core::alloc::Allocator; |
| 2 | +use core::cell::UnsafeCell; |
| 3 | +use core::marker::PhantomData; |
| 4 | +use core::mem; |
| 5 | +use core::num::NonZeroUsize; |
| 6 | +use core::ops::{Deref, DerefMut}; |
| 7 | +use core::ptr::{self, NonNull}; |
| 8 | + |
| 9 | +use crate::raw_rc::rc_layout::RcLayout; |
| 10 | +use crate::raw_rc::rc_value_pointer::RcValuePointer; |
| 11 | +use crate::raw_rc::{RefCounter, RefCounts}; |
| 12 | + |
| 13 | +// We choose `NonZeroUsize::MAX` as the address for dangling weak pointers because: |
| 14 | +// |
| 15 | +// - It does not point to any object that is stored inside a reference-counted allocation: |
| 16 | +//   otherwise, the corresponding `RefCounts` object would be placed at |
| 17 | +//   `NonZeroUsize::MAX - size_of::<RefCounts>()`, which is an odd address that violates |
| 18 | +//   `RefCounts`'s alignment requirement. |
| 19 | +// - All bytes in the byte representation of `NonZeroUsize::MAX` are the same, which makes it |
| 20 | +// possible to utilize `memset` in certain situations like creating an array of dangling weak |
| 21 | +// pointers. |
| 22 | +const DANGLING_WEAK_ADDRESS: NonZeroUsize = NonZeroUsize::MAX; |
| 23 | + |
| 24 | +// Verify that `DANGLING_WEAK_ADDRESS` is a suitable address for dangling weak pointers. |
| 25 | +const _: () = assert!( |
| 26 | + DANGLING_WEAK_ADDRESS.get().wrapping_sub(size_of::<RefCounts>()) % align_of::<RefCounts>() != 0 |
| 27 | +); |
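
As a standalone sanity check of the two bullet points above (not part of this file), the same properties can be exercised with plain `usize` arithmetic; the `16` and `8` below assume a 64-bit target where `RefCounts` is two `usize` counters.

```rust
// Standalone illustration of the two properties relied on above, assuming a 64-bit target
// where `RefCounts` occupies 16 bytes with 8-byte alignment (hypothetical figures).
fn main() {
    // 1. `usize::MAX - 16` is odd, so no properly aligned `RefCounts` can live there.
    assert_eq!((usize::MAX - 16) % 8, 7);

    // 2. Every byte of `usize::MAX` is 0xFF, so an array of dangling weak pointers can be
    //    initialized with a single `memset`-style fill.
    assert!(usize::MAX.to_ne_bytes().iter().all(|&b| b == 0xFF));
}
```
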
| 28 | + |
| 29 | +#[inline] |
| 30 | +fn is_dangling(value_ptr: NonNull<()>) -> bool { |
| 31 | + value_ptr.addr() == DANGLING_WEAK_ADDRESS |
| 32 | +} |
| 33 | + |
| 34 | +/// Decrements the weak reference count in the reference-counted allocation whose value object is |
| 35 | +/// pointed to by `value_ptr`, returning `true` if this was the last weak reference. |
| 36 | +#[inline] |
| 37 | +unsafe fn decrement_weak_ref_count<R>(value_ptr: RcValuePointer) -> bool |
| 38 | +where |
| 39 | + R: RefCounter, |
| 40 | +{ |
| 41 | + unsafe { R::from_raw_counter(value_ptr.weak_count_ptr().as_ref()) }.decrement() |
| 42 | +} |
| 43 | + |
| 44 | +/// Increments the weak reference count in the reference-counted allocation whose value object is |
| 45 | +/// pointed to by `value_ptr`. |
| 46 | +#[inline] |
| 47 | +unsafe fn increment_weak_ref_count<R>(value_ptr: RcValuePointer) |
| 48 | +where |
| 49 | + R: RefCounter, |
| 50 | +{ |
| 51 | + unsafe { R::from_raw_counter(value_ptr.weak_count_ptr().as_ref()) }.increment() |
| 52 | +} |
| 53 | + |
| 54 | +/// Calls `RawWeak::drop_unchecked` on drop. |
| 55 | +pub(super) struct WeakGuard<'a, T, A, R> |
| 56 | +where |
| 57 | + T: ?Sized, |
| 58 | + A: Allocator, |
| 59 | + R: RefCounter, |
| 60 | +{ |
| 61 | + weak: &'a mut RawWeak<T, A>, |
| 62 | + _phantom_data: PhantomData<R>, |
| 63 | +} |
| 64 | + |
| 65 | +impl<'a, T, A, R> WeakGuard<'a, T, A, R> |
| 66 | +where |
| 67 | + T: ?Sized, |
| 68 | + A: Allocator, |
| 69 | + R: RefCounter, |
| 70 | +{ |
| 71 | + /// # Safety |
| 72 | + /// |
| 73 | + /// - `weak` is non-dangling. |
| 74 | +    /// - After the `WeakGuard` is dropped, the allocation pointed to by the weak pointer must not be |
| 75 | +    ///   accessed anymore. |
| 76 | + pub(super) unsafe fn new(weak: &'a mut RawWeak<T, A>) -> Self { |
| 77 | + Self { weak, _phantom_data: PhantomData } |
| 78 | + } |
| 79 | +} |
| 80 | + |
| 81 | +impl<T, A, R> Deref for WeakGuard<'_, T, A, R> |
| 82 | +where |
| 83 | + T: ?Sized, |
| 84 | + A: Allocator, |
| 85 | + R: RefCounter, |
| 86 | +{ |
| 87 | + type Target = RawWeak<T, A>; |
| 88 | + |
| 89 | + fn deref(&self) -> &Self::Target { |
| 90 | + &*self.weak |
| 91 | + } |
| 92 | +} |
| 93 | + |
| 94 | +impl<T, A, R> DerefMut for WeakGuard<'_, T, A, R> |
| 95 | +where |
| 96 | + T: ?Sized, |
| 97 | + A: Allocator, |
| 98 | + R: RefCounter, |
| 99 | +{ |
| 100 | + fn deref_mut(&mut self) -> &mut Self::Target { |
| 101 | + self.weak |
| 102 | + } |
| 103 | +} |
| 104 | + |
| 105 | +impl<T, A, R> Drop for WeakGuard<'_, T, A, R> |
| 106 | +where |
| 107 | + T: ?Sized, |
| 108 | + A: Allocator, |
| 109 | + R: RefCounter, |
| 110 | +{ |
| 111 | + fn drop(&mut self) { |
| 112 | + unsafe { self.weak.drop_unchecked::<R>() }; |
| 113 | + } |
| 114 | +} |
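
`WeakGuard` is the standard RAII drop-guard pattern: whatever happens between constructing the guard and the end of its scope, including a panic, `RawWeak::drop_unchecked` still runs. A minimal, self-contained sketch of the same pattern (the `Cleanup` type is hypothetical and unrelated to this module):

```rust
struct Cleanup;

impl Drop for Cleanup {
    fn drop(&mut self) {
        println!("cleaned up"); // runs on normal exit and during unwinding
    }
}

fn guarded_work(fail: bool) {
    let _guard = Cleanup;
    if fail {
        panic!("work failed"); // "cleaned up" is still printed while unwinding
    }
    println!("work done");
}
```

In this file, `assume_init_drop_slow` relies on exactly this: even if `drop_in_place` unwinds, the guard still releases the weak reference.
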
| 115 | + |
| 116 | +/// Base implementation of a weak pointer. `RawWeak` does not implement `Drop`; users should call |
| 117 | +/// `RawWeak::drop` or `RawWeak::drop_unchecked` manually to drop this object. |
| 118 | +/// |
| 119 | +/// A `RawWeak` can be either dangling or non-dangling. A dangling `RawWeak` does not point to a |
| 120 | +/// valid value. A non-dangling `RawWeak` points to a valid reference-counted allocation. The value |
| 121 | +/// pointed to by a `RawWeak` may be uninitialized. |
| 122 | +pub(crate) struct RawWeak<T, A> |
| 123 | +where |
| 124 | + T: ?Sized, |
| 125 | +{ |
| 126 | + /// Points to a (possibly uninitialized or dropped) `T` value inside of a reference-counted |
| 127 | + /// allocation. |
| 128 | + ptr: NonNull<T>, |
| 129 | + |
| 130 | + /// The allocator for `ptr`. |
| 131 | + alloc: A, |
| 132 | +} |
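
For orientation, here is a rough sketch of the allocation shape a non-dangling `RawWeak` points into, as implied by the dangling-address comment above (the `RefCounts` header sits directly before the value, and `ptr` addresses the value itself). The struct names and the field order inside the counter header are illustrative only; the real layout is computed through `RcLayout`.

```rust
// Illustrative only: the real allocation layout is computed via `RcLayout`, not declared as
// a literal struct, and the field order inside the counter header is a guess.
use core::cell::UnsafeCell;

#[repr(C)]
struct RefCountsSketch {
    strong: UnsafeCell<usize>,
    weak: UnsafeCell<usize>,
}

#[repr(C)]
struct AllocationSketch<T> {
    ref_counts: RefCountsSketch, // located at the value address minus `size_of::<RefCounts>()`
    value: T,                    // `RawWeak::ptr` points here
}
```
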
| 133 | + |
| 134 | +impl<T, A> RawWeak<T, A> |
| 135 | +where |
| 136 | + T: ?Sized, |
| 137 | +{ |
| 138 | + pub(crate) const unsafe fn from_raw_parts(ptr: NonNull<T>, alloc: A) -> Self { |
| 139 | + Self { ptr, alloc } |
| 140 | + } |
| 141 | + |
| 142 | + pub(crate) unsafe fn from_raw(ptr: NonNull<T>) -> Self |
| 143 | + where |
| 144 | + A: Default, |
| 145 | + { |
| 146 | + unsafe { Self::from_raw_parts(ptr, A::default()) } |
| 147 | + } |
| 148 | + |
| 149 | + pub(crate) fn allocator(&self) -> &A { |
| 150 | + &self.alloc |
| 151 | + } |
| 152 | + |
| 153 | + pub(crate) fn as_ptr(&self) -> NonNull<T> { |
| 154 | + self.ptr |
| 155 | + } |
| 156 | + |
| 157 | + #[inline(never)] |
| 158 | + unsafe fn assume_init_drop_slow<R>(&mut self) |
| 159 | + where |
| 160 | + A: Allocator, |
| 161 | + R: RefCounter, |
| 162 | + { |
| 163 | + let guard = unsafe { WeakGuard::<T, A, R>::new(self) }; |
| 164 | + |
| 165 | + unsafe { guard.weak.ptr.drop_in_place() }; |
| 166 | + } |
| 167 | + |
| 168 | + /// Drops the value along with the `RawWeak` object, assuming the value pointed to by `ptr` is |
| 169 | +    /// initialized. |
| 170 | + #[inline] |
| 171 | + pub(super) unsafe fn assume_init_drop<R>(&mut self) |
| 172 | + where |
| 173 | + A: Allocator, |
| 174 | + R: RefCounter, |
| 175 | + { |
| 176 | + if const { mem::needs_drop::<T>() } { |
| 177 | + unsafe { self.assume_init_drop_slow::<R>() }; |
| 178 | + } else { |
| 179 | + unsafe { self.drop_unchecked::<R>() }; |
| 180 | + } |
| 181 | + } |
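
The `const { mem::needs_drop::<T>() }` dispatch above keeps the common case cheap: types without drop glue never reach the `#[inline(never)]` slow path. A standalone sketch of the same compile-time branch, using a hypothetical `release` helper:

```rust
use core::mem;

// Hypothetical helper showing the same compile-time dispatch on drop glue.
fn release<T>(value: T) {
    if const { mem::needs_drop::<T>() } {
        drop(value); // e.g. `String`: a destructor must run
    } else {
        mem::forget(value); // e.g. `u64`: no drop glue, nothing to run
    }
}
```
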
| 182 | + |
| 183 | + pub(crate) unsafe fn cast<U>(self) -> RawWeak<U, A> { |
| 184 | + unsafe { self.cast_with(NonNull::cast) } |
| 185 | + } |
| 186 | + |
| 187 | + #[inline] |
| 188 | + pub(crate) unsafe fn cast_with<U, F>(self, f: F) -> RawWeak<U, A> |
| 189 | + where |
| 190 | + U: ?Sized, |
| 191 | + F: FnOnce(NonNull<T>) -> NonNull<U>, |
| 192 | + { |
| 193 | + unsafe { RawWeak::from_raw_parts(f(self.ptr), self.alloc) } |
| 194 | + } |
| 195 | + |
| 196 | + /// Increments the weak count, and returns the corresponding `RawWeak` object. |
| 197 | + /// |
| 198 | + /// # Safety |
| 199 | + /// |
| 200 | + /// - `self` should only be handled by the same `RefCounter` implementation. |
| 201 | + #[inline] |
| 202 | + pub(crate) unsafe fn clone<R>(&self) -> Self |
| 203 | + where |
| 204 | + A: Clone, |
| 205 | + R: RefCounter, |
| 206 | + { |
| 207 | + unsafe fn inner<R>(ptr: NonNull<()>) |
| 208 | + where |
| 209 | + R: RefCounter, |
| 210 | + { |
| 211 | + if !is_dangling(ptr) { |
| 212 | + unsafe { increment_weak_ref_count::<R>(RcValuePointer::new(ptr)) }; |
| 213 | + } |
| 214 | + } |
| 215 | + |
| 216 | + unsafe { |
| 217 | + inner::<R>(self.ptr.cast()); |
| 218 | + |
| 219 | + Self::from_raw_parts(self.ptr, self.alloc.clone()) |
| 220 | + } |
| 221 | + } |
| 222 | + |
| 223 | + /// Increments the weak count, and returns the corresponding `RawWeak` object, assuming `self` |
| 224 | + /// is non-dangling. |
| 225 | + /// |
| 226 | + /// # Safety |
| 227 | + /// |
| 228 | + /// - `self` should only be handled by the same `RefCounter` implementation. |
| 229 | + /// - `self` is non-dangling. |
| 230 | + pub(crate) unsafe fn clone_unchecked<R>(&self) -> Self |
| 231 | + where |
| 232 | + A: Clone, |
| 233 | + R: RefCounter, |
| 234 | + { |
| 235 | + unsafe { |
| 236 | + increment_weak_ref_count::<R>(self.value_ptr_unchecked()); |
| 237 | + |
| 238 | + Self::from_raw_parts(self.ptr, self.alloc.clone()) |
| 239 | + } |
| 240 | + } |
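
In user-facing terms (illustrated with the existing `std::rc` types rather than with this module), cloning a dangling weak pointer touches no allocation, while cloning a non-dangling one bumps the shared weak count:

```rust
use std::rc::{Rc, Weak};

fn main() {
    // Dangling weak: no allocation exists, so cloning cannot touch any counter.
    let dangling: Weak<u32> = Weak::new();
    let _also_dangling = dangling.clone();
    assert!(dangling.upgrade().is_none());

    // Non-dangling weak: each clone increments the weak count of the shared allocation.
    let strong = Rc::new(42u32);
    let weak = Rc::downgrade(&strong);
    let weak2 = weak.clone();
    assert_eq!(Rc::weak_count(&strong), 2);
    assert_eq!(*weak2.upgrade().unwrap(), 42);
}
```
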
| 241 | + |
| 242 | + /// Drops this weak pointer. |
| 243 | + #[inline] |
| 244 | + pub(crate) unsafe fn drop<R>(&mut self) |
| 245 | + where |
| 246 | + A: Allocator, |
| 247 | + R: RefCounter, |
| 248 | + { |
| 249 | + if !is_dangling(self.ptr.cast()) { |
| 250 | + unsafe { self.drop_unchecked::<R>() }; |
| 251 | + } |
| 252 | + } |
| 253 | + |
| 254 | + /// Drops this weak pointer, assuming `self` is non-dangling. |
| 255 | + #[inline] |
| 256 | + pub(super) unsafe fn drop_unchecked<R>(&mut self) |
| 257 | + where |
| 258 | + A: Allocator, |
| 259 | + R: RefCounter, |
| 260 | + { |
| 261 | + // SAFETY: Caller guarantees `self` is non-dangling, so `self.ptr` must point to the value |
| 262 | + // location in a valid reference-counted allocation. |
| 263 | + let value_ptr = unsafe { self.value_ptr_unchecked() }; |
| 264 | + |
| 265 | + let is_last_weak_ref = unsafe { decrement_weak_ref_count::<R>(value_ptr) }; |
| 266 | + |
| 267 | + if is_last_weak_ref { |
| 268 | + let rc_layout = unsafe { RcLayout::from_value_ptr_unchecked(self.ptr) }; |
| 269 | + |
| 270 | + unsafe { super::deallocate::<A>(value_ptr, &self.alloc, rc_layout) } |
| 271 | + } |
| 272 | + } |
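
`drop_unchecked` follows the usual decrement-and-check shape: release one weak reference, and only the caller that observes the count reaching zero deallocates. A miniature version of that shape with a plain `Cell` standing in for the `RefCounter` abstraction:

```rust
use std::cell::Cell;

// Returns `true` if this call released the last reference, mirroring the bool returned by
// `decrement_weak_ref_count` above.
fn release_one(count: &Cell<usize>) -> bool {
    let old = count.get();
    count.set(old - 1);
    old == 1
}

fn main() {
    let weak_count = Cell::new(2);
    assert!(!release_one(&weak_count)); // another weak pointer still exists
    assert!(release_one(&weak_count)); // last one gone: safe to deallocate now
}
```
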
| 273 | + |
| 274 | + pub(crate) fn into_raw(self) -> NonNull<T> { |
| 275 | + self.ptr |
| 276 | + } |
| 277 | + |
| 278 | + pub(crate) fn into_raw_parts(self) -> (NonNull<T>, A) { |
| 279 | + (self.ptr, self.alloc) |
| 280 | + } |
| 281 | + |
| 282 | + pub(crate) fn ptr_eq(&self, other: &Self) -> bool { |
| 283 | + ptr::addr_eq(self.ptr.as_ptr(), other.ptr.as_ptr()) |
| 284 | + } |
| 285 | + |
| 286 | + pub(crate) fn ptr_ne(&self, other: &Self) -> bool { |
| 287 | + !ptr::addr_eq(self.ptr.as_ptr(), other.ptr.as_ptr()) |
| 288 | + } |
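
`ptr_eq` and `ptr_ne` use `ptr::addr_eq`, which compares only addresses and ignores pointer metadata (slice lengths, vtables). A standalone illustration:

```rust
use std::ptr;

fn main() {
    let bytes = [1u8, 2, 3, 4];
    let short: *const [u8] = &bytes[..2];
    let long: *const [u8] = &bytes[..4];

    assert!(ptr::addr_eq(short, long)); // same address, metadata ignored
    assert!(!ptr::eq(short, long)); // `ptr::eq` also compares the length metadata
}
```
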
| 289 | + |
| 290 | +    /// Returns the `RefCounts` object inside the reference-counted allocation, assuming `self` is |
| 291 | + /// non-dangling. |
| 292 | + /// |
| 293 | + /// # Safety |
| 294 | + /// |
| 295 | + /// `self` is non-dangling. |
| 296 | + #[cfg(not(no_global_oom_handling))] |
| 297 | + pub(super) unsafe fn ref_counts_unchecked(&self) -> &RefCounts { |
| 298 | + unsafe { self.value_ptr_unchecked().ref_counts_ptr().as_ref() } |
| 299 | + } |
| 300 | + |
| 301 | + /// Returns the strong reference count object inside the reference-counted allocation if `self` |
| 302 | + /// is non-dangling. |
| 303 | + pub(crate) fn strong_count(&self) -> Option<&UnsafeCell<usize>> { |
| 304 | + (!is_dangling(self.ptr.cast())).then(|| unsafe { self.strong_count_unchecked() }) |
| 305 | + } |
| 306 | + |
| 307 | +    /// Returns the strong reference count object inside the reference-counted allocation, assuming |
| 308 | + /// `self` is non-dangling. |
| 309 | + /// |
| 310 | + /// # Safety |
| 311 | + /// |
| 312 | + /// `self` is non-dangling. |
| 313 | + pub(super) unsafe fn strong_count_unchecked(&self) -> &UnsafeCell<usize> { |
| 314 | + unsafe { self.value_ptr_unchecked().strong_count_ptr().as_ref() } |
| 315 | + } |
| 316 | + |
| 317 | + /// Returns the weak reference count object inside the reference-counted allocation if `self` |
| 318 | + /// is non-dangling. |
| 319 | + pub(crate) fn weak_count(&self) -> Option<&UnsafeCell<usize>> { |
| 320 | + (!is_dangling(self.ptr.cast())).then(|| unsafe { self.weak_count_unchecked() }) |
| 321 | + } |
| 322 | + |
| 323 | +    /// Returns the weak reference count object inside the reference-counted allocation, assuming |
| 324 | + /// `self` is non-dangling. |
| 325 | + /// |
| 326 | + /// # Safety |
| 327 | + /// |
| 328 | + /// `self` is non-dangling. |
| 329 | + pub(super) unsafe fn weak_count_unchecked(&self) -> &UnsafeCell<usize> { |
| 330 | + unsafe { self.value_ptr_unchecked().weak_count_ptr().as_ref() } |
| 331 | + } |
| 332 | + |
| 333 | + /// Sets the contained pointer to a new value. |
| 334 | + /// |
| 335 | + /// # Safety |
| 336 | + /// |
| 337 | + /// - `ptr` should be a valid pointer to a value object that lives in a reference-counted |
| 338 | + /// allocation. |
| 339 | + /// - The allocation can be deallocated with the associated allocator. |
| 340 | + #[cfg(not(no_global_oom_handling))] |
| 341 | + pub(super) unsafe fn set_ptr(&mut self, ptr: NonNull<T>) { |
| 342 | + self.ptr = ptr; |
| 343 | + } |
| 344 | + |
| 345 | +    /// Returns a pointer to the value location of the reference-counted allocation, assuming `self` |
| 346 | + /// is non-dangling. |
| 347 | + /// |
| 348 | + /// # Safety |
| 349 | + /// |
| 350 | + /// `self` is non-dangling. |
| 351 | + #[inline] |
| 352 | + pub(super) unsafe fn value_ptr_unchecked(&self) -> RcValuePointer { |
| 353 | + // SAFETY: Caller guarantees `self` is non-dangling, so `self.ptr` must point to the value |
| 354 | + // location in a valid reference-counted allocation. |
| 355 | + unsafe { RcValuePointer::new(self.ptr.cast()) } |
| 356 | + } |
| 357 | +} |