@@ -247,6 +247,7 @@ use crate::boxed::Box;
247
247
#[ cfg( test) ]
248
248
use std:: boxed:: Box ;
249
249
250
+ use core:: alloc:: helper:: PrefixAllocator ;
250
251
use core:: any:: Any ;
251
252
use core:: borrow;
252
253
use core:: cell:: Cell ;
@@ -257,7 +258,7 @@ use core::hash::{Hash, Hasher};
257
258
use core:: intrinsics:: abort;
258
259
use core:: iter;
259
260
use core:: marker:: { self , PhantomData , Unpin , Unsize } ;
260
- use core:: mem:: { self , align_of_val_raw , forget, size_of_val} ;
261
+ use core:: mem:: { self , forget, size_of_val, MaybeUninit } ;
261
262
use core:: ops:: { CoerceUnsized , Deref , DispatchFromDyn , Receiver } ;
262
263
use core:: pin:: Pin ;
263
264
use core:: ptr:: { self , NonNull } ;
@@ -273,13 +274,33 @@ use crate::vec::Vec;
273
274
#[ cfg( test) ]
274
275
mod tests;
275
276
277
/// Reference-count header stored in front of the value of every `RcBox`
/// allocation.
struct RcBoxMetadata {
    strong: Cell<usize>,
    weak: Cell<usize>,
}

impl RcBoxMetadata {
    /// Metadata for a freshly created strong `Rc`.
    ///
    /// There is an implicit weak pointer owned by all the strong
    /// pointers, which ensures that the weak destructor never frees
    /// the allocation while the strong destructor is running, even
    /// if the weak pointer is stored inside the strong one.
    #[inline]
    fn new_strong() -> Self {
        Self { strong: Cell::new(1), weak: Cell::new(1) }
    }

    /// Metadata for an allocation that starts out with only the implicit
    /// weak reference and no strong reference yet (used while the value
    /// is still being constructed).
    #[inline]
    fn new_weak() -> Self {
        Self { strong: Cell::new(0), weak: Cell::new(1) }
    }
}
297
+
276
298
// This is repr(C) to future-proof against possible field-reordering, which
277
299
// would interfere with otherwise safe [into|from]_raw() of transmutable
278
300
// inner types.
279
301
#[ repr( C ) ]
280
302
struct RcBox < T : ?Sized > {
281
- strong : Cell < usize > ,
282
- weak : Cell < usize > ,
303
+ meta : RcBoxMetadata ,
283
304
value : T ,
284
305
}
285
306
@@ -319,10 +340,12 @@ impl<T: ?Sized> Rc<T> {
319
340
unsafe { self . ptr . as_ref ( ) }
320
341
}
321
342
343
+ #[ inline]
322
344
fn from_inner ( ptr : NonNull < RcBox < T > > ) -> Self {
323
345
Self { ptr, phantom : PhantomData }
324
346
}
325
347
348
+ #[ inline]
326
349
unsafe fn from_ptr ( ptr : * mut RcBox < T > ) -> Self {
327
350
Self :: from_inner ( unsafe { NonNull :: new_unchecked ( ptr) } )
328
351
}
@@ -340,13 +363,7 @@ impl<T> Rc<T> {
340
363
/// ```
341
364
#[ stable( feature = "rust1" , since = "1.0.0" ) ]
342
365
pub fn new ( value : T ) -> Rc < T > {
343
- // There is an implicit weak pointer owned by all the strong
344
- // pointers, which ensures that the weak destructor never frees
345
- // the allocation while the strong destructor is running, even
346
- // if the weak pointer is stored inside the strong one.
347
- Self :: from_inner (
348
- Box :: leak ( box RcBox { strong : Cell :: new ( 1 ) , weak : Cell :: new ( 1 ) , value } ) . into ( ) ,
349
- )
366
+ Self :: from_inner ( Box :: leak ( box RcBox { meta : RcBoxMetadata :: new_strong ( ) , value } ) . into ( ) )
350
367
}
351
368
352
369
/// Constructs a new `Rc<T>` using a weak reference to itself. Attempting
@@ -378,8 +395,7 @@ impl<T> Rc<T> {
378
395
// Construct the inner in the "uninitialized" state with a single
379
396
// weak reference.
380
397
let uninit_ptr: NonNull < _ > = Box :: leak ( box RcBox {
381
- strong : Cell :: new ( 0 ) ,
382
- weak : Cell :: new ( 1 ) ,
398
+ meta : RcBoxMetadata :: new_weak ( ) ,
383
399
value : mem:: MaybeUninit :: < T > :: uninit ( ) ,
384
400
} )
385
401
. into ( ) ;
@@ -400,9 +416,9 @@ impl<T> Rc<T> {
400
416
let inner = init_ptr. as_ptr ( ) ;
401
417
ptr:: write ( ptr:: addr_of_mut!( ( * inner) . value) , data) ;
402
418
403
- let prev_value = ( * inner) . strong . get ( ) ;
419
+ let prev_value = ( * inner) . meta . strong . get ( ) ;
404
420
debug_assert_eq ! ( prev_value, 0 , "No prior strong references should exist" ) ;
405
- ( * inner) . strong . set ( 1 ) ;
421
+ ( * inner) . meta . strong . set ( 1 ) ;
406
422
}
407
423
408
424
let strong = Rc :: from_inner ( init_ptr) ;
@@ -494,8 +510,7 @@ impl<T> Rc<T> {
494
510
// the allocation while the strong destructor is running, even
495
511
// if the weak pointer is stored inside the strong one.
496
512
Ok ( Self :: from_inner (
497
- Box :: leak ( Box :: try_new ( RcBox { strong : Cell :: new ( 1 ) , weak : Cell :: new ( 1 ) , value } ) ?)
498
- . into ( ) ,
513
+ Box :: leak ( Box :: try_new ( RcBox { meta : RcBoxMetadata :: new_strong ( ) , value } ) ?) . into ( ) ,
499
514
) )
500
515
}
501
516
@@ -846,13 +861,7 @@ impl<T: ?Sized> Rc<T> {
846
861
/// ```
847
862
#[ stable( feature = "rc_raw" , since = "1.17.0" ) ]
848
863
pub unsafe fn from_raw ( ptr : * const T ) -> Self {
849
- let offset = unsafe { data_offset ( ptr) } ;
850
-
851
- // Reverse the offset to find the original RcBox.
852
- let rc_ptr =
853
- unsafe { ( ptr as * mut RcBox < T > ) . set_ptr_value ( ( ptr as * mut u8 ) . offset ( -offset) ) } ;
854
-
855
- unsafe { Self :: from_ptr ( rc_ptr) }
864
+ unsafe { Self :: from_data_ptr ( ptr) . assume_init ( ) }
856
865
}
857
866
858
867
/// Creates a new [`Weak`] pointer to this allocation.
@@ -1237,8 +1246,8 @@ impl<T: ?Sized> Rc<T> {
1237
1246
unsafe {
1238
1247
debug_assert_eq ! ( Layout :: for_value( & * inner) , layout) ;
1239
1248
1240
- ptr:: write ( & mut ( * inner) . strong , Cell :: new ( 1 ) ) ;
1241
- ptr:: write ( & mut ( * inner) . weak , Cell :: new ( 1 ) ) ;
1249
+ ptr:: write ( & mut ( * inner) . meta . strong , Cell :: new ( 1 ) ) ;
1250
+ ptr:: write ( & mut ( * inner) . meta . weak , Cell :: new ( 1 ) ) ;
1242
1251
}
1243
1252
1244
1253
Ok ( inner)
@@ -1277,6 +1286,23 @@ impl<T: ?Sized> Rc<T> {
1277
1286
Self :: from_ptr ( ptr)
1278
1287
}
1279
1288
}
1289
+
1290
+ /// # Safety
1291
+ ///
1292
+ /// The caller must ensure that the pointer points to the `value` field of a `Global`
1293
+ /// allocation of type `RcBox<T>`. Depending on how the pointer was created, the
1294
+ /// `meta` field might or might not be uninitialized. It's up to the caller to ensure
1295
+ /// that this field is set to the correct value before the return value is unwrapped.
1296
+ #[ inline]
1297
+ unsafe fn from_data_ptr ( ptr : * const T ) -> MaybeUninit < Self > {
1298
+ let offset = unsafe { data_offset ( ptr) } ;
1299
+
1300
+ // Reverse the offset to find the original RcBox.
1301
+ let rc_ptr =
1302
+ unsafe { ( ptr as * mut RcBox < T > ) . set_ptr_value ( ( ptr as * mut u8 ) . offset ( -offset) ) } ;
1303
+
1304
+ unsafe { MaybeUninit :: new ( Self :: from_ptr ( rc_ptr) ) }
1305
+ }
1280
1306
}
1281
1307
1282
1308
impl < T > Rc < [ T ] > {
@@ -2206,7 +2232,7 @@ impl<T: ?Sized> Weak<T> {
2206
2232
// is dropped, the data field will be dropped in-place).
2207
2233
Some ( unsafe {
2208
2234
let ptr = self . ptr . as_ptr ( ) ;
2209
- WeakInner { strong : & ( * ptr) . strong , weak : & ( * ptr) . weak }
2235
+ WeakInner { strong : & ( * ptr) . meta . strong , weak : & ( * ptr) . meta . weak }
2210
2236
} )
2211
2237
}
2212
2238
}
@@ -2415,12 +2441,12 @@ trait RcInnerPtr {
2415
2441
impl < T : ?Sized > RcInnerPtr for RcBox < T > {
2416
2442
#[ inline( always) ]
2417
2443
fn weak_ref ( & self ) -> & Cell < usize > {
2418
- & self . weak
2444
+ & self . meta . weak
2419
2445
}
2420
2446
2421
2447
#[ inline( always) ]
2422
2448
fn strong_ref ( & self ) -> & Cell < usize > {
2423
- & self . strong
2449
+ & self . meta . strong
2424
2450
}
2425
2451
}
2426
2452
@@ -2453,24 +2479,21 @@ impl<T: ?Sized> AsRef<T> for Rc<T> {
2453
2479
#[ stable( feature = "pin" , since = "1.33.0" ) ]
2454
2480
impl < T : ?Sized > Unpin for Rc < T > { }
2455
2481
2482
+ type RcAllocator = PrefixAllocator < RcBoxMetadata , Global > ;
2483
+
2456
2484
/// Get the offset within an `RcBox` for the payload behind a pointer.
2457
2485
///
2458
2486
/// # Safety
2459
2487
///
2460
2488
/// The pointer must point to (and have valid metadata for) a previously
2461
2489
/// valid instance of T, but the T is allowed to be dropped.
2462
2490
unsafe fn data_offset < T : ?Sized > ( ptr : * const T ) -> isize {
2463
- // Align the unsized value to the end of the RcBox.
2464
- // Because RcBox is repr(C), it will always be the last field in memory.
2465
- // SAFETY: since the only unsized types possible are slices, trait objects,
2466
- // and extern types, the input safety requirement is currently enough to
2467
- // satisfy the requirements of align_of_val_raw; this is an implementation
2468
- // detail of the language that may not be relied upon outside of std.
2469
- unsafe { data_offset_align ( align_of_val_raw ( ptr) ) }
2470
- }
2471
-
2472
- #[ inline]
2473
- fn data_offset_align ( align : usize ) -> isize {
2474
- let layout = Layout :: new :: < RcBox < ( ) > > ( ) ;
2475
- ( layout. size ( ) + layout. padding_needed_for ( align) ) as isize
2491
+ unsafe {
2492
+ // SAFETY: since the only unsized types possible are slices, trait objects,
2493
+ // and extern types, the input safety requirement is currently enough to
2494
+ // satisfy the requirements of for_value_raw; this is an implementation
2495
+ // detail of the language that may not be relied upon outside of std.
2496
+ let layout = Layout :: for_value_raw ( ptr) ;
2497
+ RcAllocator :: prefix_offset ( layout) as isize
2498
+ }
2476
2499
}
0 commit comments