@@ -259,7 +259,7 @@ use core::intrinsics::abort;
 #[cfg(not(no_global_oom_handling))]
 use core::iter;
 use core::marker::{PhantomData, Unsize};
-use core::mem::{self, align_of_val_raw, forget, ManuallyDrop};
+use core::mem::{self, align_of_val_raw, ManuallyDrop};
 use core::ops::{CoerceUnsized, Deref, DerefMut, DerefPure, DispatchFromDyn, Receiver};
 use core::panic::{RefUnwindSafe, UnwindSafe};
 #[cfg(not(no_global_oom_handling))]
@@ -908,19 +908,18 @@ impl<T, A: Allocator> Rc<T, A> {
     #[stable(feature = "rc_unique", since = "1.4.0")]
     pub fn try_unwrap(this: Self) -> Result<T, Self> {
         if Rc::strong_count(&this) == 1 {
-            unsafe {
-                let val = ptr::read(&*this); // copy the contained object
-                let alloc = ptr::read(&this.alloc); // copy the allocator
-
-                // Indicate to Weaks that they can't be promoted by decrementing
-                // the strong count, and then remove the implicit "strong weak"
-                // pointer while also handling drop logic by just crafting a
-                // fake Weak.
-                this.inner().dec_strong();
-                let _weak = Weak { ptr: this.ptr, alloc };
-                forget(this);
-                Ok(val)
-            }
+            let this = ManuallyDrop::new(this);
+
+            let val: T = unsafe { ptr::read(&**this) }; // copy the contained object
+            let alloc: A = unsafe { ptr::read(&this.alloc) }; // copy the allocator
+
+            // Indicate to Weaks that they can't be promoted by decrementing
+            // the strong count, and then remove the implicit "strong weak"
+            // pointer while also handling drop logic by just crafting a
+            // fake Weak.
+            this.inner().dec_strong();
+            let _weak = Weak { ptr: this.ptr, alloc };
+            Ok(val)
         } else {
             Err(this)
         }
@@ -1354,9 +1353,8 @@ impl<T: ?Sized, A: Allocator> Rc<T, A> {
     #[stable(feature = "rc_raw", since = "1.17.0")]
     #[rustc_never_returns_null_ptr]
     pub fn into_raw(this: Self) -> *const T {
-        let ptr = Self::as_ptr(&this);
-        mem::forget(this);
-        ptr
+        let this = ManuallyDrop::new(this);
+        Self::as_ptr(&*this)
     }

     /// Consumes the `Rc`, returning the wrapped pointer and allocator.
@@ -2127,7 +2125,7 @@ impl<T> Rc<[T]> {
             }

             // All clear. Forget the guard so it doesn't free the new RcBox.
-            forget(guard);
+            mem::forget(guard);

             Self::from_ptr(ptr)
         }
@@ -3080,9 +3078,7 @@ impl<T: ?Sized, A: Allocator> Weak<T, A> {
     #[must_use = "losing the pointer will leak memory"]
     #[stable(feature = "weak_into_raw", since = "1.45.0")]
     pub fn into_raw(self) -> *const T {
-        let result = self.as_ptr();
-        mem::forget(self);
-        result
+        mem::ManuallyDrop::new(self).as_ptr()
     }

     /// Consumes the `Weak<T>`, returning the wrapped pointer and allocator.
@@ -3762,10 +3758,11 @@ impl<T: ?Sized, A: Allocator> UniqueRcUninit<T, A> {
     /// # Safety
     ///
     /// The data must have been initialized (by writing to [`Self::data_ptr()`]).
-    unsafe fn into_rc(mut self) -> Rc<T, A> {
-        let ptr = self.ptr;
-        let alloc = self.alloc.take().unwrap();
-        mem::forget(self);
+    unsafe fn into_rc(self) -> Rc<T, A> {
+        let mut this = ManuallyDrop::new(self);
+        let ptr = this.ptr;
+        let alloc = this.alloc.take().unwrap();
+
         // SAFETY: The pointer is valid as per `UniqueRcUninit::new`, and the caller is responsible
         // for having initialized the data.
         unsafe { Rc::from_ptr_in(ptr.as_ptr(), alloc) }
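
The hunks above all apply the same refactor: wrap the value in ManuallyDrop instead of calling mem::forget after the fact. The following is a minimal standalone sketch of that pattern, using a hypothetical Handle type rather than anything from rc.rs; it only illustrates why ManuallyDrop reads more cleanly — the destructor is disabled up front and the value stays usable afterwards, whereas with mem::forget every field read has to happen before the forget call.

use std::mem::ManuallyDrop;

// Toy owning handle with a destructor, standing in for an Rc-like type.
struct Handle {
    value: Box<u32>,
}

impl Drop for Handle {
    fn drop(&mut self) {
        println!("dropping Handle");
    }
}

impl Handle {
    // Consumes the handle and returns a raw pointer to its payload
    // without running Handle's destructor.
    fn into_raw(this: Self) -> *const u32 {
        // Disabling the destructor first (instead of mem::forget at the end)
        // lets us keep borrowing `this` for as long as we need.
        let this = ManuallyDrop::new(this);
        &*this.value as *const u32
    }
}

fn main() {
    let h = Handle { value: Box::new(7) };
    let p = Handle::into_raw(h);
    // SAFETY: `p` points into the Box that `into_raw` intentionally leaked;
    // the allocation is still live because Handle's destructor never ran.
    unsafe {
        println!("payload = {}", *p);
        // Reclaim the allocation in this toy example so nothing leaks.
        drop(Box::from_raw(p as *mut u32));
    }
}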