@@ -68,6 +68,7 @@
 //! ```
 
 use core::atomic;
+use core::atomic::Ordering::{Relaxed, Release, Acquire, SeqCst};
 use core::borrow::BorrowFrom;
 use core::clone::Clone;
 use core::fmt::{mod, Show};
@@ -182,7 +183,7 @@ impl<T> Arc<T> {
     #[experimental = "Weak pointers may not belong in this module."]
     pub fn downgrade(&self) -> Weak<T> {
         // See the clone() impl for why this is relaxed
-        self.inner().weak.fetch_add(1, atomic::Relaxed);
+        self.inner().weak.fetch_add(1, Relaxed);
         Weak { _ptr: self._ptr }
     }
 }
@@ -201,12 +202,12 @@ impl<T> Arc<T> {
 /// Get the number of weak references to this value.
 #[inline]
 #[experimental]
-pub fn weak_count<T>(this: &Arc<T>) -> uint { this.inner().weak.load(atomic::SeqCst) - 1 }
+pub fn weak_count<T>(this: &Arc<T>) -> uint { this.inner().weak.load(SeqCst) - 1 }
 
 /// Get the number of strong references to this value.
 #[inline]
 #[experimental]
-pub fn strong_count<T>(this: &Arc<T>) -> uint { this.inner().strong.load(atomic::SeqCst) }
+pub fn strong_count<T>(this: &Arc<T>) -> uint { this.inner().strong.load(SeqCst) }
 
 #[stable]
 impl<T> Clone for Arc<T> {
@@ -234,7 +235,7 @@ impl<T> Clone for Arc<T> {
         // must already provide any required synchronization.
         //
         // [1]: (www.boost.org/doc/libs/1_55_0/doc/html/atomic/usage_examples.html)
-        self.inner().strong.fetch_add(1, atomic::Relaxed);
+        self.inner().strong.fetch_add(1, Relaxed);
         Arc { _ptr: self._ptr }
     }
 }
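The Boost rationale the comment above cites boils down to: a reference can only be duplicated by a thread that already owns one, so the increment itself needs no ordering. A minimal standalone sketch of that invariant, written against today's `std::sync::atomic` API (`AtomicUsize` and the `Ordering` enum are the modern spellings of the `core::atomic` items this diff imports; the `STRONG` static and `clone_handle` function are invented for illustration):

```rust
use std::sync::atomic::{AtomicUsize, Ordering::Relaxed};

// Toy strong count, standing in for `ArcInner::strong` (starts at 1,
// as if one Arc already exists).
static STRONG: AtomicUsize = AtomicUsize::new(1);

fn clone_handle() {
    // Relaxed suffices: the calling thread already holds a reference,
    // so the count is at least 1, and a new reference can only be made
    // from an existing one -- that existing reference already provides
    // any synchronization the data needs.
    let old = STRONG.fetch_add(1, Relaxed);
    debug_assert!(old >= 1);
}

fn main() {
    clone_handle();
    assert_eq!(STRONG.load(Relaxed), 2);
}
```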
@@ -273,8 +274,8 @@ impl<T: Send + Sync + Clone> Arc<T> {
     pub fn make_unique(&mut self) -> &mut T {
         // Note that we hold a strong reference, which also counts as a weak reference, so we only
         // clone if there is an additional reference of either kind.
-        if self.inner().strong.load(atomic::SeqCst) != 1 ||
-           self.inner().weak.load(atomic::SeqCst) != 1 {
+        if self.inner().strong.load(SeqCst) != 1 ||
+           self.inner().weak.load(SeqCst) != 1 {
             *self = Arc::new((**self).clone())
         }
         // This unsafety is ok because we're guaranteed that the pointer returned is the *only*
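For the uniqueness check above: a strong count of 1 together with a weak count of 1 (the strong reference counts as one weak reference) means no other handle can observe the data, so handing out `&mut T` is safe; otherwise the value is cloned first. In current Rust this method survives as `Arc::make_mut`; a quick usage sketch of the same clone-on-write behaviour:

```rust
use std::sync::Arc;

fn main() {
    let mut a = Arc::new(vec![1, 2, 3]);
    let b = Arc::clone(&a);
    // The value is shared (strong count is 2), so make_mut clones it
    // before returning a unique &mut -- the same check as make_unique.
    Arc::make_mut(&mut a).push(4);
    assert_eq!(*a, vec![1, 2, 3, 4]);
    assert_eq!(*b, vec![1, 2, 3]); // b still sees the original value
}
```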
@@ -322,7 +323,7 @@ impl<T: Sync + Send> Drop for Arc<T> {
         // Because `fetch_sub` is already atomic, we do not need to synchronize with other threads
         // unless we are going to delete the object. This same logic applies to the below
         // `fetch_sub` to the `weak` count.
-        if self.inner().strong.fetch_sub(1, atomic::Release) != 1 { return }
+        if self.inner().strong.fetch_sub(1, Release) != 1 { return }
 
         // This fence is needed to prevent reordering of use of the data and deletion of the data.
         // Because it is marked `Release`, the decreasing of the reference count synchronizes with
@@ -339,14 +340,14 @@ impl<T: Sync + Send> Drop for Arc<T> {
         // > operation before deleting the object.
         //
         // [1]: (www.boost.org/doc/libs/1_55_0/doc/html/atomic/usage_examples.html)
-        atomic::fence(atomic::Acquire);
+        atomic::fence(Acquire);
 
         // Destroy the data at this time, even though we may not free the box allocation itself
         // (there may still be weak pointers lying around).
         unsafe { drop(ptr::read(&self.inner().data)); }
 
-        if self.inner().weak.fetch_sub(1, atomic::Release) == 1 {
-            atomic::fence(atomic::Acquire);
+        if self.inner().weak.fetch_sub(1, Release) == 1 {
+            atomic::fence(Acquire);
             unsafe { deallocate(ptr as *mut u8, size_of::<ArcInner<T>>(),
                                 min_align_of::<ArcInner<T>>()) }
         }
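The decrement/fence pairing in these two hunks is the classic refcount teardown from the cited Boost page. A condensed, runnable sketch of just that pattern, using the modern `std::sync::atomic` names rather than the 2014 `core::atomic` ones (the `STRONG` static and `drop_handle` function are invented for the example):

```rust
use std::sync::atomic::{fence, AtomicUsize, Ordering::{Acquire, Release}};

static STRONG: AtomicUsize = AtomicUsize::new(1);

fn drop_handle() {
    // Release publishes this thread's uses of the data; if we are not
    // the last reference, no further ordering is needed.
    if STRONG.fetch_sub(1, Release) != 1 {
        return;
    }
    // The Acquire fence pairs with every earlier Release decrement, so
    // all other threads' accesses happen-before the destruction below.
    fence(Acquire);
    println!("last reference gone: destroy the data here");
}

fn main() {
    drop_handle();
}
```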
@@ -377,9 +378,9 @@ impl<T: Sync + Send> Weak<T> {
         // count hits 0 it must never be above 0.
         let inner = self.inner();
         loop {
-            let n = inner.strong.load(atomic::SeqCst);
+            let n = inner.strong.load(SeqCst);
             if n == 0 { return None }
-            let old = inner.strong.compare_and_swap(n, n + 1, atomic::SeqCst);
+            let old = inner.strong.compare_and_swap(n, n + 1, SeqCst);
             if old == n { return Some(Arc { _ptr: self._ptr }) }
         }
     }
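`upgrade` must not revive a dead value, hence the load-then-CAS loop above: the increment only takes effect if the strong count observed is still non-zero. A self-contained sketch of the same loop; note that `compare_and_swap` has since been deprecated in favour of `compare_exchange_weak`, which is what this hypothetical `try_upgrade` helper uses:

```rust
use std::sync::atomic::{AtomicUsize, Ordering::SeqCst};

// Try to turn a weak handle into a strong one: increment `strong` only
// if it is still non-zero, mirroring the Weak::upgrade loop above.
fn try_upgrade(strong: &AtomicUsize) -> bool {
    let mut n = strong.load(SeqCst);
    loop {
        if n == 0 {
            return false; // the value has already been dropped
        }
        // On failure, compare_exchange_weak yields the freshly observed
        // count so the loop can retry without a separate reload.
        match strong.compare_exchange_weak(n, n + 1, SeqCst, SeqCst) {
            Ok(_) => return true,
            Err(current) => n = current,
        }
    }
}

fn main() {
    let strong = AtomicUsize::new(1);
    assert!(try_upgrade(&strong));
    assert_eq!(strong.load(SeqCst), 2);
}
```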
@@ -409,7 +410,7 @@ impl<T: Sync + Send> Clone for Weak<T> {
     #[inline]
     fn clone(&self) -> Weak<T> {
         // See comments in Arc::clone() for why this is relaxed
-        self.inner().weak.fetch_add(1, atomic::Relaxed);
+        self.inner().weak.fetch_add(1, Relaxed);
         Weak { _ptr: self._ptr }
     }
 }
@@ -450,8 +451,8 @@ impl<T: Sync + Send> Drop for Weak<T> {
 
         // If we find out that we were the last weak pointer, then it's time to deallocate the data
         // entirely. See the discussion in Arc::drop() about the memory orderings
-        if self.inner().weak.fetch_sub(1, atomic::Release) == 1 {
-            atomic::fence(atomic::Acquire);
+        if self.inner().weak.fetch_sub(1, Release) == 1 {
+            atomic::fence(Acquire);
             unsafe { deallocate(ptr as *mut u8, size_of::<ArcInner<T>>(),
                                 min_align_of::<ArcInner<T>>()) }
         }
@@ -613,7 +614,7 @@ mod tests {
         unsafe {
             match *self {
                 Canary(c) => {
-                    (*c).fetch_add(1, atomic::SeqCst);
+                    (*c).fetch_add(1, SeqCst);
                 }
             }
         }
@@ -732,17 +733,17 @@ mod tests {
         let mut canary = atomic::AtomicUint::new(0);
         let x = Arc::new(Canary(&mut canary as *mut atomic::AtomicUint));
         drop(x);
-        assert!(canary.load(atomic::Acquire) == 1);
+        assert!(canary.load(Acquire) == 1);
     }
 
     #[test]
     fn drop_arc_weak() {
         let mut canary = atomic::AtomicUint::new(0);
         let arc = Arc::new(Canary(&mut canary as *mut atomic::AtomicUint));
         let arc_weak = arc.downgrade();
-        assert!(canary.load(atomic::Acquire) == 0);
+        assert!(canary.load(Acquire) == 0);
         drop(arc);
-        assert!(canary.load(atomic::Acquire) == 1);
+        assert!(canary.load(Acquire) == 1);
         drop(arc_weak);
     }
 