Skip to content

Commit 1d26fb9

Browse files
committed
Remove core::atomic::Ordering::* public reexport
Part of #19253 [breaking-change]
1 parent 023dfb0 commit 1d26fb9

File tree

3 files changed

+22 −21 lines changed

src/liballoc/arc.rs

+20-19
Original file line number · Diff line number · Diff line change
@@ -68,6 +68,7 @@
6868
//! ```
6969
7070
use core::atomic;
71+
use core::atomic::Ordering::{Relaxed, Release, Acquire, SeqCst};
7172
use core::borrow::BorrowFrom;
7273
use core::clone::Clone;
7374
use core::fmt::{mod, Show};
@@ -182,7 +183,7 @@ impl<T> Arc<T> {
182183
#[experimental = "Weak pointers may not belong in this module."]
183184
pub fn downgrade(&self) -> Weak<T> {
184185
// See the clone() impl for why this is relaxed
185-
self.inner().weak.fetch_add(1, atomic::Relaxed);
186+
self.inner().weak.fetch_add(1, Relaxed);
186187
Weak { _ptr: self._ptr }
187188
}
188189
}
@@ -201,12 +202,12 @@ impl<T> Arc<T> {
201202
/// Get the number of weak references to this value.
202203
#[inline]
203204
#[experimental]
204-
pub fn weak_count<T>(this: &Arc<T>) -> uint { this.inner().weak.load(atomic::SeqCst) - 1 }
205+
pub fn weak_count<T>(this: &Arc<T>) -> uint { this.inner().weak.load(SeqCst) - 1 }
205206

206207
/// Get the number of strong references to this value.
207208
#[inline]
208209
#[experimental]
209-
pub fn strong_count<T>(this: &Arc<T>) -> uint { this.inner().strong.load(atomic::SeqCst) }
210+
pub fn strong_count<T>(this: &Arc<T>) -> uint { this.inner().strong.load(SeqCst) }
210211

211212
#[stable]
212213
impl<T> Clone for Arc<T> {
@@ -234,7 +235,7 @@ impl<T> Clone for Arc<T> {
234235
// must already provide any required synchronization.
235236
//
236237
// [1]: (www.boost.org/doc/libs/1_55_0/doc/html/atomic/usage_examples.html)
237-
self.inner().strong.fetch_add(1, atomic::Relaxed);
238+
self.inner().strong.fetch_add(1, Relaxed);
238239
Arc { _ptr: self._ptr }
239240
}
240241
}
@@ -273,8 +274,8 @@ impl<T: Send + Sync + Clone> Arc<T> {
273274
pub fn make_unique(&mut self) -> &mut T {
274275
// Note that we hold a strong reference, which also counts as a weak reference, so we only
275276
// clone if there is an additional reference of either kind.
276-
if self.inner().strong.load(atomic::SeqCst) != 1 ||
277-
self.inner().weak.load(atomic::SeqCst) != 1 {
277+
if self.inner().strong.load(SeqCst) != 1 ||
278+
self.inner().weak.load(SeqCst) != 1 {
278279
*self = Arc::new((**self).clone())
279280
}
280281
// This unsafety is ok because we're guaranteed that the pointer returned is the *only*
@@ -322,7 +323,7 @@ impl<T: Sync + Send> Drop for Arc<T> {
322323
// Because `fetch_sub` is already atomic, we do not need to synchronize with other threads
323324
// unless we are going to delete the object. This same logic applies to the below
324325
// `fetch_sub` to the `weak` count.
325-
if self.inner().strong.fetch_sub(1, atomic::Release) != 1 { return }
326+
if self.inner().strong.fetch_sub(1, Release) != 1 { return }
326327

327328
// This fence is needed to prevent reordering of use of the data and deletion of the data.
328329
// Because it is marked `Release`, the decreasing of the reference count synchronizes with
@@ -339,14 +340,14 @@ impl<T: Sync + Send> Drop for Arc<T> {
339340
// > operation before deleting the object.
340341
//
341342
// [1]: (www.boost.org/doc/libs/1_55_0/doc/html/atomic/usage_examples.html)
342-
atomic::fence(atomic::Acquire);
343+
atomic::fence(Acquire);
343344

344345
// Destroy the data at this time, even though we may not free the box allocation itself
345346
// (there may still be weak pointers lying around).
346347
unsafe { drop(ptr::read(&self.inner().data)); }
347348

348-
if self.inner().weak.fetch_sub(1, atomic::Release) == 1 {
349-
atomic::fence(atomic::Acquire);
349+
if self.inner().weak.fetch_sub(1, Release) == 1 {
350+
atomic::fence(Acquire);
350351
unsafe { deallocate(ptr as *mut u8, size_of::<ArcInner<T>>(),
351352
min_align_of::<ArcInner<T>>()) }
352353
}
@@ -377,9 +378,9 @@ impl<T: Sync + Send> Weak<T> {
377378
// count hits 0 is must never be above 0.
378379
let inner = self.inner();
379380
loop {
380-
let n = inner.strong.load(atomic::SeqCst);
381+
let n = inner.strong.load(SeqCst);
381382
if n == 0 { return None }
382-
let old = inner.strong.compare_and_swap(n, n + 1, atomic::SeqCst);
383+
let old = inner.strong.compare_and_swap(n, n + 1, SeqCst);
383384
if old == n { return Some(Arc { _ptr: self._ptr }) }
384385
}
385386
}
@@ -409,7 +410,7 @@ impl<T: Sync + Send> Clone for Weak<T> {
409410
#[inline]
410411
fn clone(&self) -> Weak<T> {
411412
// See comments in Arc::clone() for why this is relaxed
412-
self.inner().weak.fetch_add(1, atomic::Relaxed);
413+
self.inner().weak.fetch_add(1, Relaxed);
413414
Weak { _ptr: self._ptr }
414415
}
415416
}
@@ -450,8 +451,8 @@ impl<T: Sync + Send> Drop for Weak<T> {
450451

451452
// If we find out that we were the last weak pointer, then its time to deallocate the data
452453
// entirely. See the discussion in Arc::drop() about the memory orderings
453-
if self.inner().weak.fetch_sub(1, atomic::Release) == 1 {
454-
atomic::fence(atomic::Acquire);
454+
if self.inner().weak.fetch_sub(1, Release) == 1 {
455+
atomic::fence(Acquire);
455456
unsafe { deallocate(ptr as *mut u8, size_of::<ArcInner<T>>(),
456457
min_align_of::<ArcInner<T>>()) }
457458
}
@@ -613,7 +614,7 @@ mod tests {
613614
unsafe {
614615
match *self {
615616
Canary(c) => {
616-
(*c).fetch_add(1, atomic::SeqCst);
617+
(*c).fetch_add(1, SeqCst);
617618
}
618619
}
619620
}
@@ -732,17 +733,17 @@ mod tests {
732733
let mut canary = atomic::AtomicUint::new(0);
733734
let x = Arc::new(Canary(&mut canary as *mut atomic::AtomicUint));
734735
drop(x);
735-
assert!(canary.load(atomic::Acquire) == 1);
736+
assert!(canary.load(Acquire) == 1);
736737
}
737738

738739
#[test]
739740
fn drop_arc_weak() {
740741
let mut canary = atomic::AtomicUint::new(0);
741742
let arc = Arc::new(Canary(&mut canary as *mut atomic::AtomicUint));
742743
let arc_weak = arc.downgrade();
743-
assert!(canary.load(atomic::Acquire) == 0);
744+
assert!(canary.load(Acquire) == 0);
744745
drop(arc);
745-
assert!(canary.load(atomic::Acquire) == 1);
746+
assert!(canary.load(Acquire) == 1);
746747
drop(arc_weak);
747748
}
748749

src/libcore/atomic.rs

+1-1
Original file line number · Diff line number · Diff line change
@@ -12,7 +12,7 @@
1212
1313
#![stable]
1414

15-
pub use self::Ordering::*;
15+
use self::Ordering::*;
1616

1717
use kinds::Sync;
1818

src/libstd/sync/atomic.rs

+1-1
Original file line number · Diff line number · Diff line change
@@ -101,9 +101,9 @@ use core::mem;
101101
use core::prelude::{Send, Drop, None, Option, Some};
102102

103103
pub use core::atomic::{AtomicBool, AtomicInt, AtomicUint, AtomicPtr};
104-
pub use core::atomic::{Ordering, Relaxed, Release, Acquire, AcqRel, SeqCst};
105104
pub use core::atomic::{INIT_ATOMIC_BOOL, INIT_ATOMIC_INT, INIT_ATOMIC_UINT};
106105
pub use core::atomic::fence;
106+
pub use core::atomic::Ordering::{mod, Relaxed, Release, Acquire, AcqRel, SeqCst};
107107

108108
/// An atomic, nullable unique pointer
109109
///

0 commit comments

Comments (0)