Paper over privacy issues with Deref by changing field names. #14402

Merged 1 commit on May 25, 2014
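
For context on what the renames below work around (issue #12808): when a smart pointer implements `Deref` and also has a field whose name matches a field of the pointee, `expr.field` resolves to the pointer's own field before auto-deref is tried, shadowing the contained type's field. Below is a minimal sketch of the clash in present-day Rust, using hypothetical `MyBox`/`Widget` types rather than the PR's code; the underscore-prefixed names in this PR simply make such a collision unlikely, hence "paper over".

```rust
use std::ops::Deref;

// Hypothetical smart pointer whose internal field name collides with
// a field of the pointee, mirroring the old `Arc { x: ... }` layout.
struct MyBox<T> {
    ptr: *mut T,
}

struct Widget {
    ptr: usize, // user field that happens to share the name
}

impl<T> Deref for MyBox<T> {
    type Target = T;
    fn deref(&self) -> &T {
        unsafe { &*self.ptr }
    }
}

fn main() {
    let b = MyBox { ptr: Box::into_raw(Box::new(Widget { ptr: 42 })) };
    // Field access stops at the first matching name in the deref chain,
    // so `b.ptr` is MyBox's raw pointer, not Widget::ptr; reaching the
    // pointee's field takes an explicit deref:
    let inner: usize = (*b).ptr;
    assert_eq!(inner, 42);
    unsafe { drop(Box::from_raw(b.ptr)) } // free the heap allocation
}
```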
34 changes: 19 additions & 15 deletions src/liballoc/arc.rs
@@ -54,7 +54,9 @@ use heap::deallocate;
/// ```
#[unsafe_no_drop_flag]
pub struct Arc<T> {
- x: *mut ArcInner<T>,
+ // FIXME #12808: strange name to try to avoid interfering with
+ // field accesses of the contained type via Deref
+ _ptr: *mut ArcInner<T>,
}

/// A weak pointer to an `Arc`.
@@ -63,7 +65,9 @@ pub struct Arc<T> {
/// used to break cycles between `Arc` pointers.
#[unsafe_no_drop_flag]
pub struct Weak<T> {
- x: *mut ArcInner<T>,
+ // FIXME #12808: strange name to try to avoid interfering with
+ // field accesses of the contained type via Deref
+ _ptr: *mut ArcInner<T>,
}

struct ArcInner<T> {
@@ -83,7 +87,7 @@ impl<T: Share + Send> Arc<T> {
weak: atomics::AtomicUint::new(1),
data: data,
};
- Arc { x: unsafe { mem::transmute(x) } }
+ Arc { _ptr: unsafe { mem::transmute(x) } }
}

#[inline]
@@ -93,7 +97,7 @@ impl<T: Share + Send> Arc<T> {
// `ArcInner` structure itself is `Share` because the inner data is
// `Share` as well, so we're ok loaning out an immutable pointer to
// these contents.
- unsafe { &*self.x }
+ unsafe { &*self._ptr }
}

/// Downgrades a strong pointer to a weak pointer
@@ -104,7 +108,7 @@ impl<T: Share + Send> Arc<T> {
pub fn downgrade(&self) -> Weak<T> {
// See the clone() impl for why this is relaxed
self.inner().weak.fetch_add(1, atomics::Relaxed);
- Weak { x: self.x }
+ Weak { _ptr: self._ptr }
}
}

@@ -128,7 +132,7 @@ impl<T: Share + Send> Clone for Arc<T> {
//
// [1]: (www.boost.org/doc/libs/1_55_0/doc/html/atomic/usage_examples.html)
self.inner().strong.fetch_add(1, atomics::Relaxed);
- Arc { x: self.x }
+ Arc { _ptr: self._ptr }
}
}

@@ -166,7 +170,7 @@ impl<T: Share + Send> Drop for Arc<T> {
// This structure has #[unsafe_no_drop_flag], so this drop glue may run
// more than once (but it is guaranteed to be zeroed after the first if
// it's run more than once)
- if self.x.is_null() { return }
+ if self._ptr.is_null() { return }

// Because `fetch_sub` is already atomic, we do not need to synchronize
// with other threads unless we are going to delete the object. This
@@ -198,7 +202,7 @@

if self.inner().weak.fetch_sub(1, atomics::Release) == 1 {
atomics::fence(atomics::Acquire);
- unsafe { deallocate(self.x as *mut u8, size_of::<ArcInner<T>>(),
+ unsafe { deallocate(self._ptr as *mut u8, size_of::<ArcInner<T>>(),
min_align_of::<ArcInner<T>>()) }
}
}
@@ -218,14 +222,14 @@ impl<T: Share + Send> Weak<T> {
let n = inner.strong.load(atomics::SeqCst);
if n == 0 { return None }
let old = inner.strong.compare_and_swap(n, n + 1, atomics::SeqCst);
- if old == n { return Some(Arc { x: self.x }) }
+ if old == n { return Some(Arc { _ptr: self._ptr }) }
}
}

#[inline]
fn inner<'a>(&'a self) -> &'a ArcInner<T> {
// See comments above for why this is "safe"
- unsafe { &*self.x }
+ unsafe { &*self._ptr }
}
}

@@ -234,22 +238,22 @@ impl<T: Share + Send> Clone for Weak<T> {
fn clone(&self) -> Weak<T> {
// See comments in Arc::clone() for why this is relaxed
self.inner().weak.fetch_add(1, atomics::Relaxed);
- Weak { x: self.x }
+ Weak { _ptr: self._ptr }
}
}

#[unsafe_destructor]
impl<T: Share + Send> Drop for Weak<T> {
fn drop(&mut self) {
// see comments above for why this check is here
- if self.x.is_null() { return }
+ if self._ptr.is_null() { return }

// If we find out that we were the last weak pointer, then its time to
// deallocate the data entirely. See the discussion in Arc::drop() about
// the memory orderings
if self.inner().weak.fetch_sub(1, atomics::Release) == 1 {
atomics::fence(atomics::Acquire);
- unsafe { deallocate(self.x as *mut u8, size_of::<ArcInner<T>>(),
+ unsafe { deallocate(self._ptr as *mut u8, size_of::<ArcInner<T>>(),
min_align_of::<ArcInner<T>>()) }
}
}
@@ -261,7 +265,7 @@ mod tests {
use std::clone::Clone;
use std::comm::channel;
use std::mem::drop;
- use std::ops::{Drop, Deref, DerefMut};
+ use std::ops::Drop;
use std::option::{Option, Some, None};
use std::sync::atomics;
use std::task;
@@ -374,7 +378,7 @@ mod tests {

let a = Arc::new(Cycle { x: Mutex::new(None) });
let b = a.clone().downgrade();
- *a.deref().x.lock().deref_mut() = Some(b);
+ *a.x.lock() = Some(b);

// hopefully we don't double-free (or leak)...
}
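
Aside from the renames, the `Weak::upgrade` hunk above shows the one subtle piece of logic in this file: upgrading cannot blindly increment the strong count, because the count may already have dropped to zero. It loops, re-reading the count and publishing `n + 1` only if the count is still `n`. A rough sketch of that loop with today's atomics (`AtomicUsize`/`compare_exchange` standing in for the 2014 `AtomicUint::compare_and_swap`, which returned the old value):

```rust
use std::sync::atomic::{AtomicUsize, Ordering};

// Sketch of the upgrade loop: bump `strong` from n to n + 1 only if
// it is both nonzero and still n at the instant of the swap.
fn try_upgrade(strong: &AtomicUsize) -> bool {
    loop {
        let n = strong.load(Ordering::SeqCst);
        if n == 0 {
            return false; // every strong pointer is gone; nothing to revive
        }
        // Succeeds only if no other thread changed the count since the
        // load above; on contention, loop and re-read.
        if strong
            .compare_exchange(n, n + 1, Ordering::SeqCst, Ordering::SeqCst)
            .is_ok()
        {
            return true;
        }
    }
}

fn main() {
    let strong = AtomicUsize::new(1);
    assert!(try_upgrade(&strong)); // count goes 1 -> 2
    strong.store(0, Ordering::SeqCst);
    assert!(!try_upgrade(&strong)); // a dead Arc cannot be upgraded
}
```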
46 changes: 25 additions & 21 deletions src/liballoc/rc.rs
@@ -45,9 +45,11 @@ struct RcBox<T> {
/// Immutable reference counted pointer type
#[unsafe_no_drop_flag]
pub struct Rc<T> {
- ptr: *mut RcBox<T>,
- nosend: marker::NoSend,
- noshare: marker::NoShare
+ // FIXME #12808: strange names to try to avoid interfering with
+ // field accesses of the contained type via Deref
+ _ptr: *mut RcBox<T>,
+ _nosend: marker::NoSend,
+ _noshare: marker::NoShare
}

impl<T> Rc<T> {
@@ -60,13 +62,13 @@ impl<T> Rc<T> {
// destructor never frees the allocation while the
// strong destructor is running, even if the weak
// pointer is stored inside the strong one.
- ptr: transmute(box RcBox {
+ _ptr: transmute(box RcBox {
value: value,
strong: Cell::new(1),
weak: Cell::new(1)
}),
- nosend: marker::NoSend,
- noshare: marker::NoShare
+ _nosend: marker::NoSend,
+ _noshare: marker::NoShare
}
}
}
@@ -77,9 +79,9 @@ impl<T> Rc<T> {
pub fn downgrade(&self) -> Weak<T> {
self.inc_weak();
Weak {
- ptr: self.ptr,
- nosend: marker::NoSend,
- noshare: marker::NoShare
+ _ptr: self._ptr,
+ _nosend: marker::NoSend,
+ _noshare: marker::NoShare
}
}
}
@@ -96,7 +98,7 @@ impl<T> Deref<T> for Rc<T> {
impl<T> Drop for Rc<T> {
fn drop(&mut self) {
unsafe {
- if !self.ptr.is_null() {
+ if !self._ptr.is_null() {
self.dec_strong();
if self.strong() == 0 {
ptr::read(self.deref()); // destroy the contained object
@@ -106,7 +108,7 @@ impl<T> Drop for Rc<T> {
self.dec_weak();

if self.weak() == 0 {
- deallocate(self.ptr as *mut u8, size_of::<RcBox<T>>(),
+ deallocate(self._ptr as *mut u8, size_of::<RcBox<T>>(),
min_align_of::<RcBox<T>>())
}
}
@@ -119,7 +121,7 @@ impl<T> Clone for Rc<T> {
#[inline]
fn clone(&self) -> Rc<T> {
self.inc_strong();
- Rc { ptr: self.ptr, nosend: marker::NoSend, noshare: marker::NoShare }
+ Rc { _ptr: self._ptr, _nosend: marker::NoSend, _noshare: marker::NoShare }
}
}

@@ -154,9 +156,11 @@ impl<T: TotalOrd> TotalOrd for Rc<T> {
/// Weak reference to a reference-counted box
#[unsafe_no_drop_flag]
pub struct Weak<T> {
- ptr: *mut RcBox<T>,
- nosend: marker::NoSend,
- noshare: marker::NoShare
+ // FIXME #12808: strange names to try to avoid interfering with
+ // field accesses of the contained type via Deref
+ _ptr: *mut RcBox<T>,
+ _nosend: marker::NoSend,
+ _noshare: marker::NoShare
}

impl<T> Weak<T> {
@@ -166,7 +170,7 @@ impl<T> Weak<T> {
None
} else {
self.inc_strong();
- Some(Rc { ptr: self.ptr, nosend: marker::NoSend, noshare: marker::NoShare })
+ Some(Rc { _ptr: self._ptr, _nosend: marker::NoSend, _noshare: marker::NoShare })
}
}
}
@@ -175,12 +179,12 @@ impl<T> Weak<T> {
impl<T> Drop for Weak<T> {
fn drop(&mut self) {
unsafe {
- if !self.ptr.is_null() {
+ if !self._ptr.is_null() {
self.dec_weak();
// the weak count starts at 1, and will only go to
// zero if all the strong pointers have disappeared.
if self.weak() == 0 {
- deallocate(self.ptr as *mut u8, size_of::<RcBox<T>>(),
+ deallocate(self._ptr as *mut u8, size_of::<RcBox<T>>(),
min_align_of::<RcBox<T>>())
}
}
@@ -192,7 +196,7 @@ impl<T> Clone for Weak<T> {
#[inline]
fn clone(&self) -> Weak<T> {
self.inc_weak();
- Weak { ptr: self.ptr, nosend: marker::NoSend, noshare: marker::NoShare }
+ Weak { _ptr: self._ptr, _nosend: marker::NoSend, _noshare: marker::NoShare }
}
}

@@ -221,12 +225,12 @@ trait RcBoxPtr<T> {

impl<T> RcBoxPtr<T> for Rc<T> {
#[inline(always)]
- fn inner<'a>(&'a self) -> &'a RcBox<T> { unsafe { &(*self.ptr) } }
+ fn inner<'a>(&'a self) -> &'a RcBox<T> { unsafe { &(*self._ptr) } }
}

impl<T> RcBoxPtr<T> for Weak<T> {
#[inline(always)]
- fn inner<'a>(&'a self) -> &'a RcBox<T> { unsafe { &(*self.ptr) } }
+ fn inner<'a>(&'a self) -> &'a RcBox<T> { unsafe { &(*self._ptr) } }
}

#[cfg(test)]
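A gloss on the counting scheme visible in `Rc::new` and both `Drop` impls above: the weak count starts at 1 because the strong pointers collectively hold one implicit weak reference, so the allocation cannot be freed while the strong destructor is still running, even when the weak pointer lives inside the strong one. A toy, non-allocating model of the two-counter protocol (names assumed for illustration):

```rust
// Toy model of Rc's two counters: `strong` guards the value, `weak`
// guards the allocation, and the strong pointers collectively hold
// one implicit weak reference, so `weak` starts at 1.
struct Counts {
    strong: usize,
    weak: usize,
}

impl Counts {
    fn new() -> Counts {
        Counts { strong: 1, weak: 1 }
    }

    // Dropping the last strong pointer destroys the value and then
    // releases the implicit weak reference.
    fn drop_strong(&mut self) -> &'static str {
        self.strong -= 1;
        if self.strong == 0 {
            self.weak -= 1;
            if self.weak == 0 {
                return "value destroyed, allocation freed";
            }
            return "value destroyed, allocation kept for weak pointers";
        }
        "nothing freed yet"
    }

    // Dropping a weak pointer frees the allocation only when it was
    // the last reference of either kind.
    fn drop_weak(&mut self) -> &'static str {
        self.weak -= 1;
        if self.weak == 0 { "allocation freed" } else { "allocation kept" }
    }
}

fn main() {
    let mut c = Counts::new();
    c.weak += 1; // downgrade() hands out one Weak
    assert_eq!(c.drop_strong(), "value destroyed, allocation kept for weak pointers");
    assert_eq!(c.drop_weak(), "allocation freed");
}
```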
32 changes: 18 additions & 14 deletions src/libcore/cell.rs
@@ -251,7 +251,7 @@ impl<T> RefCell<T> {
WRITING => None,
borrow => {
self.borrow.set(borrow + 1);
- Some(Ref { parent: self })
+ Some(Ref { _parent: self })
}
}
}
@@ -281,7 +281,7 @@ impl<T> RefCell<T> {
match self.borrow.get() {
UNUSED => {
self.borrow.set(WRITING);
- Some(RefMut { parent: self })
+ Some(RefMut { _parent: self })
},
_ => None
}
@@ -317,22 +317,24 @@ impl<T: Eq> Eq for RefCell<T> {

/// Wraps a borrowed reference to a value in a `RefCell` box.
pub struct Ref<'b, T> {
- parent: &'b RefCell<T>
+ // FIXME #12808: strange name to try to avoid interfering with
+ // field accesses of the contained type via Deref
+ _parent: &'b RefCell<T>
}

#[unsafe_destructor]
impl<'b, T> Drop for Ref<'b, T> {
fn drop(&mut self) {
- let borrow = self.parent.borrow.get();
+ let borrow = self._parent.borrow.get();
debug_assert!(borrow != WRITING && borrow != UNUSED);
- self.parent.borrow.set(borrow - 1);
+ self._parent.borrow.set(borrow - 1);
}
}

impl<'b, T> Deref<T> for Ref<'b, T> {
#[inline]
fn deref<'a>(&'a self) -> &'a T {
- unsafe { &*self.parent.value.get() }
+ unsafe { &*self._parent.value.get() }
}
}

@@ -346,40 +348,42 @@ impl<'b, T> Deref<T> for Ref<'b, T> {
pub fn clone_ref<'b, T>(orig: &Ref<'b, T>) -> Ref<'b, T> {
// Since this Ref exists, we know the borrow flag
// is not set to WRITING.
- let borrow = orig.parent.borrow.get();
+ let borrow = orig._parent.borrow.get();
debug_assert!(borrow != WRITING && borrow != UNUSED);
- orig.parent.borrow.set(borrow + 1);
+ orig._parent.borrow.set(borrow + 1);

Ref {
- parent: orig.parent,
+ _parent: orig._parent,
}
}

/// Wraps a mutable borrowed reference to a value in a `RefCell` box.
pub struct RefMut<'b, T> {
- parent: &'b RefCell<T>
+ // FIXME #12808: strange name to try to avoid interfering with
+ // field accesses of the contained type via Deref
+ _parent: &'b RefCell<T>
}

#[unsafe_destructor]
impl<'b, T> Drop for RefMut<'b, T> {
fn drop(&mut self) {
- let borrow = self.parent.borrow.get();
+ let borrow = self._parent.borrow.get();
debug_assert!(borrow == WRITING);
- self.parent.borrow.set(UNUSED);
+ self._parent.borrow.set(UNUSED);
}
}

impl<'b, T> Deref<T> for RefMut<'b, T> {
#[inline]
fn deref<'a>(&'a self) -> &'a T {
- unsafe { &*self.parent.value.get() }
+ unsafe { &*self._parent.value.get() }
}
}

impl<'b, T> DerefMut<T> for RefMut<'b, T> {
#[inline]
fn deref_mut<'a>(&'a mut self) -> &'a mut T {
- unsafe { &mut *self.parent.value.get() }
+ unsafe { &mut *self._parent.value.get() }
}
}
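
For readers new to `RefCell`'s internals, the hunks above all orbit one piece of state: a single `borrow` flag cell, where `UNUSED` means no borrows, `WRITING` marks the unique mutable borrow, and any other value counts outstanding shared borrows; `Ref` and `RefMut` adjust it on `drop`. A standalone sketch of that protocol (constant values and names assumed, not libcore's actual ones):

```rust
use std::cell::Cell;

const UNUSED: isize = 0;
const WRITING: isize = -1; // sentinel for the unique mutable borrow

struct BorrowFlag {
    borrow: Cell<isize>,
}

impl BorrowFlag {
    // Shared borrow: any state except WRITING admits another reader,
    // mirroring the `try_borrow` hunk above.
    fn try_borrow(&self) -> Option<()> {
        match self.borrow.get() {
            WRITING => None,
            n => {
                self.borrow.set(n + 1);
                Some(())
            }
        }
    }

    // Mutable borrow: only UNUSED admits it, as in `try_borrow_mut`.
    fn try_borrow_mut(&self) -> Option<()> {
        if self.borrow.get() == UNUSED {
            self.borrow.set(WRITING);
            Some(())
        } else {
            None
        }
    }

    // What Ref::drop does: decrement the reader count.
    fn release_shared(&self) {
        let n = self.borrow.get();
        debug_assert!(n != WRITING && n != UNUSED);
        self.borrow.set(n - 1);
    }

    // What RefMut::drop does: reset the flag to UNUSED.
    fn release_mut(&self) {
        debug_assert!(self.borrow.get() == WRITING);
        self.borrow.set(UNUSED);
    }
}

fn main() {
    let f = BorrowFlag { borrow: Cell::new(UNUSED) };
    assert!(f.try_borrow().is_some());     // first reader
    assert!(f.try_borrow().is_some());     // second reader coexists
    assert!(f.try_borrow_mut().is_none()); // writer blocked by readers
    f.release_shared();
    f.release_shared();
    assert!(f.try_borrow_mut().is_some()); // now the writer succeeds
    f.release_mut();
}
```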
