Skip to content

Commit 0936b58

Browse files
committed
Remove strange names created by lack of privacy-conscious name lookup
This was made possible by the fix for issue #12808 (privacy-conscious name lookup), so the underscore-prefixed field names are no longer needed to avoid interfering with `Deref` field accesses.
1 parent 7fd331e commit 0936b58

File tree

3 files changed

+74
-86
lines changed

3 files changed

+74
-86
lines changed

src/liballoc/arc.rs

+20-24
Original file line numberDiff line numberDiff line change
@@ -124,9 +124,7 @@ const MAX_REFCOUNT: usize = (isize::MAX) as usize;
124124
#[unsafe_no_drop_flag]
125125
#[stable(feature = "rust1", since = "1.0.0")]
126126
pub struct Arc<T: ?Sized> {
127-
// FIXME #12808: strange name to try to avoid interfering with
128-
// field accesses of the contained type via Deref
129-
_ptr: Shared<ArcInner<T>>,
127+
ptr: Shared<ArcInner<T>>,
130128
}
131129

132130
#[stable(feature = "rust1", since = "1.0.0")]
@@ -144,9 +142,7 @@ impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<Arc<U>> for Arc<T> {}
144142
#[unsafe_no_drop_flag]
145143
#[stable(feature = "arc_weak", since = "1.4.0")]
146144
pub struct Weak<T: ?Sized> {
147-
// FIXME #12808: strange name to try to avoid interfering with
148-
// field accesses of the contained type via Deref
149-
_ptr: Shared<ArcInner<T>>,
145+
ptr: Shared<ArcInner<T>>,
150146
}
151147

152148
#[stable(feature = "arc_weak", since = "1.4.0")]
@@ -198,7 +194,7 @@ impl<T> Arc<T> {
198194
weak: atomic::AtomicUsize::new(1),
199195
data: data,
200196
};
201-
Arc { _ptr: unsafe { Shared::new(Box::into_raw(x)) } }
197+
Arc { ptr: unsafe { Shared::new(Box::into_raw(x)) } }
202198
}
203199

204200
/// Unwraps the contained value if the `Arc<T>` has exactly one strong reference.
@@ -230,11 +226,11 @@ impl<T> Arc<T> {
230226
atomic::fence(Acquire);
231227

232228
unsafe {
233-
let ptr = *this._ptr;
229+
let ptr = *this.ptr;
234230
let elem = ptr::read(&(*ptr).data);
235231

236232
// Make a weak pointer to clean up the implicit strong-weak reference
237-
let _weak = Weak { _ptr: this._ptr };
233+
let _weak = Weak { ptr: this.ptr };
238234
mem::forget(this);
239235

240236
Ok(elem)
@@ -274,7 +270,7 @@ impl<T: ?Sized> Arc<T> {
274270
// synchronize with the write coming from `is_unique`, so that the
275271
// events prior to that write happen before this read.
276272
match this.inner().weak.compare_exchange_weak(cur, cur + 1, Acquire, Relaxed) {
277-
Ok(_) => return Weak { _ptr: this._ptr },
273+
Ok(_) => return Weak { ptr: this.ptr },
278274
Err(old) => cur = old,
279275
}
280276
}
@@ -303,13 +299,13 @@ impl<T: ?Sized> Arc<T> {
303299
// `ArcInner` structure itself is `Sync` because the inner data is
304300
// `Sync` as well, so we're ok loaning out an immutable pointer to these
305301
// contents.
306-
unsafe { &**self._ptr }
302+
unsafe { &**self.ptr }
307303
}
308304

309305
// Non-inlined part of `drop`.
310306
#[inline(never)]
311307
unsafe fn drop_slow(&mut self) {
312-
let ptr = *self._ptr;
308+
let ptr = *self.ptr;
313309

314310
// Destroy the data at this time, even though we may not free the box
315311
// allocation itself (there may still be weak pointers lying around).
@@ -367,7 +363,7 @@ impl<T: ?Sized> Clone for Arc<T> {
367363
}
368364
}
369365

370-
Arc { _ptr: self._ptr }
366+
Arc { ptr: self.ptr }
371367
}
372368
}
373369

@@ -435,15 +431,15 @@ impl<T: Clone> Arc<T> {
435431

436432
// Materialize our own implicit weak pointer, so that it can clean
437433
// up the ArcInner as needed.
438-
let weak = Weak { _ptr: this._ptr };
434+
let weak = Weak { ptr: this.ptr };
439435

440436
// mark the data itself as already deallocated
441437
unsafe {
442438
// there is no data race in the implicit write caused by `read`
443439
// here (due to zeroing) because data is no longer accessed by
444440
// other threads (due to there being no more strong refs at this
445441
// point).
446-
let mut swap = Arc::new(ptr::read(&(**weak._ptr).data));
442+
let mut swap = Arc::new(ptr::read(&(**weak.ptr).data));
447443
mem::swap(this, &mut swap);
448444
mem::forget(swap);
449445
}
@@ -456,7 +452,7 @@ impl<T: Clone> Arc<T> {
456452
// As with `get_mut()`, the unsafety is ok because our reference was
457453
// either unique to begin with, or became one upon cloning the contents.
458454
unsafe {
459-
let inner = &mut **this._ptr;
455+
let inner = &mut **this.ptr;
460456
&mut inner.data
461457
}
462458
}
@@ -488,7 +484,7 @@ impl<T: ?Sized> Arc<T> {
488484
// the Arc itself to be `mut`, so we're returning the only possible
489485
// reference to the inner data.
490486
unsafe {
491-
let inner = &mut **this._ptr;
487+
let inner = &mut **this.ptr;
492488
Some(&mut inner.data)
493489
}
494490
} else {
@@ -557,7 +553,7 @@ impl<T: ?Sized> Drop for Arc<T> {
557553
// This structure has #[unsafe_no_drop_flag], so this drop glue may run
558554
// more than once (but it is guaranteed to be zeroed after the first if
559555
// it's run more than once)
560-
let thin = *self._ptr as *const ();
556+
let thin = *self.ptr as *const ();
561557

562558
if thin as usize == mem::POST_DROP_USIZE {
563559
return;
@@ -638,7 +634,7 @@ impl<T: ?Sized> Weak<T> {
638634

639635
// Relaxed is valid for the same reason it is on Arc's Clone impl
640636
match inner.strong.compare_exchange_weak(n, n + 1, Relaxed, Relaxed) {
641-
Ok(_) => return Some(Arc { _ptr: self._ptr }),
637+
Ok(_) => return Some(Arc { ptr: self.ptr }),
642638
Err(old) => n = old,
643639
}
644640
}
@@ -647,7 +643,7 @@ impl<T: ?Sized> Weak<T> {
647643
#[inline]
648644
fn inner(&self) -> &ArcInner<T> {
649645
// See comments above for why this is "safe"
650-
unsafe { &**self._ptr }
646+
unsafe { &**self.ptr }
651647
}
652648
}
653649

@@ -681,7 +677,7 @@ impl<T: ?Sized> Clone for Weak<T> {
681677
}
682678
}
683679

684-
return Weak { _ptr: self._ptr };
680+
return Weak { ptr: self.ptr };
685681
}
686682
}
687683

@@ -713,7 +709,7 @@ impl<T: ?Sized> Drop for Weak<T> {
713709
/// } // implicit drop
714710
/// ```
715711
fn drop(&mut self) {
716-
let ptr = *self._ptr;
712+
let ptr = *self.ptr;
717713
let thin = ptr as *const ();
718714

719715
// see comments above for why this check is here
@@ -885,7 +881,7 @@ impl<T: ?Sized + fmt::Debug> fmt::Debug for Arc<T> {
885881
#[stable(feature = "rust1", since = "1.0.0")]
886882
impl<T: ?Sized> fmt::Pointer for Arc<T> {
887883
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
888-
fmt::Pointer::fmt(&*self._ptr, f)
884+
fmt::Pointer::fmt(&*self.ptr, f)
889885
}
890886
}
891887

@@ -930,7 +926,7 @@ impl<T> Weak<T> {
930926
issue = "30425")]
931927
pub fn new() -> Weak<T> {
932928
unsafe {
933-
Weak { _ptr: Shared::new(Box::into_raw(box ArcInner {
929+
Weak { ptr: Shared::new(Box::into_raw(box ArcInner {
934930
strong: atomic::AtomicUsize::new(0),
935931
weak: atomic::AtomicUsize::new(1),
936932
data: uninitialized(),

src/liballoc/rc.rs

+19-23
Original file line numberDiff line numberDiff line change
@@ -184,9 +184,7 @@ struct RcBox<T: ?Sized> {
184184
#[unsafe_no_drop_flag]
185185
#[stable(feature = "rust1", since = "1.0.0")]
186186
pub struct Rc<T: ?Sized> {
187-
// FIXME #12808: strange names to try to avoid interfering with field
188-
// accesses of the contained type via Deref
189-
_ptr: Shared<RcBox<T>>,
187+
ptr: Shared<RcBox<T>>,
190188
}
191189

192190
#[stable(feature = "rust1", since = "1.0.0")]
@@ -215,7 +213,7 @@ impl<T> Rc<T> {
215213
// pointers, which ensures that the weak destructor never frees
216214
// the allocation while the strong destructor is running, even
217215
// if the weak pointer is stored inside the strong one.
218-
_ptr: Shared::new(Box::into_raw(box RcBox {
216+
ptr: Shared::new(Box::into_raw(box RcBox {
219217
strong: Cell::new(1),
220218
weak: Cell::new(1),
221219
value: value,
@@ -254,7 +252,7 @@ impl<T> Rc<T> {
254252
// pointer while also handling drop logic by just crafting a
255253
// fake Weak.
256254
this.dec_strong();
257-
let _weak = Weak { _ptr: this._ptr };
255+
let _weak = Weak { ptr: this.ptr };
258256
forget(this);
259257
Ok(val)
260258
}
@@ -287,7 +285,7 @@ impl<T: ?Sized> Rc<T> {
287285
#[stable(feature = "rc_weak", since = "1.4.0")]
288286
pub fn downgrade(this: &Self) -> Weak<T> {
289287
this.inc_weak();
290-
Weak { _ptr: this._ptr }
288+
Weak { ptr: this.ptr }
291289
}
292290

293291
/// Get the number of weak references to this value.
@@ -348,7 +346,7 @@ impl<T: ?Sized> Rc<T> {
348346
#[stable(feature = "rc_unique", since = "1.4.0")]
349347
pub fn get_mut(this: &mut Self) -> Option<&mut T> {
350348
if Rc::is_unique(this) {
351-
let inner = unsafe { &mut **this._ptr };
349+
let inner = unsafe { &mut **this.ptr };
352350
Some(&mut inner.value)
353351
} else {
354352
None
@@ -390,7 +388,7 @@ impl<T: Clone> Rc<T> {
390388
} else if Rc::weak_count(this) != 0 {
391389
// Can just steal the data, all that's left is Weaks
392390
unsafe {
393-
let mut swap = Rc::new(ptr::read(&(**this._ptr).value));
391+
let mut swap = Rc::new(ptr::read(&(**this.ptr).value));
394392
mem::swap(this, &mut swap);
395393
swap.dec_strong();
396394
// Remove implicit strong-weak ref (no need to craft a fake
@@ -404,7 +402,7 @@ impl<T: Clone> Rc<T> {
404402
// reference count is guaranteed to be 1 at this point, and we required
405403
// the `Rc<T>` itself to be `mut`, so we're returning the only possible
406404
// reference to the inner value.
407-
let inner = unsafe { &mut **this._ptr };
405+
let inner = unsafe { &mut **this.ptr };
408406
&mut inner.value
409407
}
410408
}
@@ -449,7 +447,7 @@ impl<T: ?Sized> Drop for Rc<T> {
449447
#[unsafe_destructor_blind_to_params]
450448
fn drop(&mut self) {
451449
unsafe {
452-
let ptr = *self._ptr;
450+
let ptr = *self.ptr;
453451
let thin = ptr as *const ();
454452

455453
if thin as usize != mem::POST_DROP_USIZE {
@@ -490,7 +488,7 @@ impl<T: ?Sized> Clone for Rc<T> {
490488
#[inline]
491489
fn clone(&self) -> Rc<T> {
492490
self.inc_strong();
493-
Rc { _ptr: self._ptr }
491+
Rc { ptr: self.ptr }
494492
}
495493
}
496494

@@ -691,7 +689,7 @@ impl<T: ?Sized + fmt::Debug> fmt::Debug for Rc<T> {
691689
#[stable(feature = "rust1", since = "1.0.0")]
692690
impl<T: ?Sized> fmt::Pointer for Rc<T> {
693691
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
694-
fmt::Pointer::fmt(&*self._ptr, f)
692+
fmt::Pointer::fmt(&*self.ptr, f)
695693
}
696694
}
697695

@@ -711,9 +709,7 @@ impl<T> From<T> for Rc<T> {
711709
#[unsafe_no_drop_flag]
712710
#[stable(feature = "rc_weak", since = "1.4.0")]
713711
pub struct Weak<T: ?Sized> {
714-
// FIXME #12808: strange names to try to avoid interfering with
715-
// field accesses of the contained type via Deref
716-
_ptr: Shared<RcBox<T>>,
712+
ptr: Shared<RcBox<T>>,
717713
}
718714

719715
#[stable(feature = "rc_weak", since = "1.4.0")]
@@ -749,7 +745,7 @@ impl<T: ?Sized> Weak<T> {
749745
None
750746
} else {
751747
self.inc_strong();
752-
Some(Rc { _ptr: self._ptr })
748+
Some(Rc { ptr: self.ptr })
753749
}
754750
}
755751
}
@@ -783,7 +779,7 @@ impl<T: ?Sized> Drop for Weak<T> {
783779
/// ```
784780
fn drop(&mut self) {
785781
unsafe {
786-
let ptr = *self._ptr;
782+
let ptr = *self.ptr;
787783
let thin = ptr as *const ();
788784

789785
if thin as usize != mem::POST_DROP_USIZE {
@@ -816,7 +812,7 @@ impl<T: ?Sized> Clone for Weak<T> {
816812
#[inline]
817813
fn clone(&self) -> Weak<T> {
818814
self.inc_weak();
819-
Weak { _ptr: self._ptr }
815+
Weak { ptr: self.ptr }
820816
}
821817
}
822818

@@ -848,7 +844,7 @@ impl<T> Weak<T> {
848844
pub fn new() -> Weak<T> {
849845
unsafe {
850846
Weak {
851-
_ptr: Shared::new(Box::into_raw(box RcBox {
847+
ptr: Shared::new(Box::into_raw(box RcBox {
852848
strong: Cell::new(0),
853849
weak: Cell::new(1),
854850
value: uninitialized(),
@@ -910,8 +906,8 @@ impl<T: ?Sized> RcBoxPtr<T> for Rc<T> {
910906
// the contract anyway.
911907
// This allows the null check to be elided in the destructor if we
912908
// manipulated the reference count in the same function.
913-
assume(!(*(&self._ptr as *const _ as *const *const ())).is_null());
914-
&(**self._ptr)
909+
assume(!(*(&self.ptr as *const _ as *const *const ())).is_null());
910+
&(**self.ptr)
915911
}
916912
}
917913
}
@@ -924,8 +920,8 @@ impl<T: ?Sized> RcBoxPtr<T> for Weak<T> {
924920
// the contract anyway.
925921
// This allows the null check to be elided in the destructor if we
926922
// manipulated the reference count in the same function.
927-
assume(!(*(&self._ptr as *const _ as *const *const ())).is_null());
928-
&(**self._ptr)
923+
assume(!(*(&self.ptr as *const _ as *const *const ())).is_null());
924+
&(**self.ptr)
929925
}
930926
}
931927
}

0 commit comments

Comments
 (0)