Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

[WIP] Implement DST coercions. #23785

Closed
wants to merge 12 commits into from
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
8 changes: 8 additions & 0 deletions src/liballoc/boxed.rs
Original file line number Diff line number Diff line change
Expand Up @@ -59,6 +59,11 @@ use core::ops::{Deref, DerefMut};
use core::ptr::Unique;
use core::raw::TraitObject;

#[cfg(not(stage0))] // SNAP c64d671
use core::marker::Unsize;
#[cfg(not(stage0))] // SNAP c64d671
use core::ops::CoerceUnsized;

/// A value that represents the heap. This is the default place that the `box`
/// keyword allocates into when no place is supplied.
///
Expand Down Expand Up @@ -327,3 +332,6 @@ impl<'a, E: Error + 'a> FromError<E> for Box<Error + 'a> {
Box::new(err)
}
}

// DST coercion for `Box`: an empty marker impl stating that `Box<T>` may
// coerce to `Box<U>` whenever `T` unsizes to `U` (e.g. sized type -> trait
// object, `[T; N]` -> `[T]`). Gated out of stage0 because the snapshot
// compiler (c64d671) predates `CoerceUnsized`.
#[cfg(not(stage0))] // SNAP c64d671
impl<T: ?Sized+Unsize<U>, U: ?Sized> CoerceUnsized<Box<U>> for Box<T> {}
169 changes: 142 additions & 27 deletions src/liballoc/rc.rs
Original file line number Diff line number Diff line change
Expand Up @@ -159,7 +159,7 @@ use core::cmp::{PartialEq, PartialOrd, Eq, Ord, Ordering};
use core::default::Default;
use core::fmt;
use core::hash::{Hasher, Hash};
use core::marker;
use core::marker::{self, Sized};
use core::mem::{min_align_of, size_of, forget};
use core::nonzero::NonZero;
use core::ops::{Deref, Drop};
Expand All @@ -170,28 +170,40 @@ use core::result::Result;
use core::result::Result::{Ok, Err};
use core::intrinsics::assume;

#[cfg(not(stage0))] // SNAP c64d671
use core::intrinsics::drop_in_place;
#[cfg(not(stage0))] // SNAP c64d671
use core::marker::Unsize;
#[cfg(not(stage0))] // SNAP c64d671
use core::mem::{min_align_of_val, size_of_val};
#[cfg(not(stage0))] // SNAP c64d671
use core::ops::CoerceUnsized;

use heap::deallocate;

struct RcBox<T> {
value: T,
struct RcBox<T: ?Sized> {
strong: Cell<usize>,
weak: Cell<usize>
weak: Cell<usize>,
value: T
}

/// A reference-counted pointer type over an immutable value.
///
/// See the [module level documentation](./index.html) for more details.
#[unsafe_no_drop_flag]
#[stable(feature = "rust1", since = "1.0.0")]
pub struct Rc<T> {
pub struct Rc<T: ?Sized> {
    // FIXME #12808: strange names to try to avoid interfering with field
    // accesses of the contained type via Deref
    // Pointer to the shared `RcBox` allocation holding the counts and the
    // value; `T: ?Sized` means this may be a fat pointer.
    _ptr: NonZero<*mut RcBox<T>>,
}

impl<T> !marker::Send for Rc<T> {}
// `Rc` keeps its reference counts in non-atomic `Cell`s, so sending it to
// another thread would be unsound.
impl<T: ?Sized> !marker::Send for Rc<T> {}

// Likewise, sharing an `Rc` across threads would race on the non-atomic
// counts, so it is not `Sync` either.
impl<T: ?Sized> !marker::Sync for Rc<T> {}

impl<T> !marker::Sync for Rc<T> {}
// DST coercion for `Rc`: empty marker impl allowing `Rc<T>` -> `Rc<U>` when
// `T` unsizes to `U`, mirroring the `Box` impl in boxed.rs.
#[cfg(not(stage0))] // SNAP c64d671
impl<T: ?Sized+Unsize<U>, U: ?Sized> CoerceUnsized<Rc<U>> for Rc<T> {}

impl<T> Rc<T> {
/// Constructs a new `Rc<T>`.
Expand All @@ -212,14 +224,16 @@ impl<T> Rc<T> {
// the allocation while the strong destructor is running, even
// if the weak pointer is stored inside the strong one.
_ptr: NonZero::new(boxed::into_raw(box RcBox {
value: value,
strong: Cell::new(1),
weak: Cell::new(1)
weak: Cell::new(1),
value: value
})),
}
}
}
}

impl<T: ?Sized> Rc<T> {
/// Downgrades the `Rc<T>` to a `Weak<T>` reference.
///
/// # Examples
Expand All @@ -243,12 +257,12 @@ impl<T> Rc<T> {
/// Get the number of weak references to this value.
#[inline]
#[unstable(feature = "alloc")]
pub fn weak_count<T>(this: &Rc<T>) -> usize { this.weak() - 1 }
pub fn weak_count<T: ?Sized>(this: &Rc<T>) -> usize { this.weak() - 1 }

/// Get the number of strong references to this value.
#[inline]
#[unstable(feature = "alloc")]
pub fn strong_count<T>(this: &Rc<T>) -> usize { this.strong() }
pub fn strong_count<T: ?Sized>(this: &Rc<T>) -> usize { this.strong() }

/// Returns true if there are no other `Rc` or `Weak<T>` values that share the
/// same inner value.
Expand Down Expand Up @@ -366,7 +380,7 @@ impl<T: Clone> Rc<T> {
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T> Deref for Rc<T> {
impl<T: ?Sized> Deref for Rc<T> {
type Target = T;

#[inline(always)]
Expand All @@ -375,6 +389,7 @@ impl<T> Deref for Rc<T> {
}
}

#[cfg(stage0)] // SNAP c64d671
#[unsafe_destructor]
#[stable(feature = "rust1", since = "1.0.0")]
impl<T> Drop for Rc<T> {
Expand Down Expand Up @@ -426,8 +441,61 @@ impl<T> Drop for Rc<T> {
}
}

#[cfg(not(stage0))] // SNAP c64d671
#[unsafe_destructor]
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized> Drop for Rc<T> {
    /// Drops the `Rc<T>`.
    ///
    /// This will decrement the strong reference count. If the strong reference
    /// count becomes zero and the only other references are `Weak<T>` ones,
    /// `drop`s the inner value.
    ///
    /// # Examples
    ///
    /// ```
    /// # #![feature(alloc)]
    /// use std::rc::Rc;
    ///
    /// {
    ///     let five = Rc::new(5);
    ///
    ///     // stuff
    ///
    ///     drop(five); // explicit drop
    /// }
    /// {
    ///     let five = Rc::new(5);
    ///
    ///     // stuff
    ///
    /// } // implicit drop
    /// ```
    fn drop(&mut self) {
        unsafe {
            let ptr = *self._ptr;
            // Because `Rc` is `#[unsafe_no_drop_flag]`, this destructor can
            // run against an already-dropped (zeroed) value. `ptr` may be a
            // fat pointer (`T: ?Sized`), so test only its data-pointer word
            // by reinterpreting the first word of `ptr`'s storage, and do
            // nothing when it is null.
            if !(*(&ptr as *const _ as *const *const ())).is_null() {
                self.dec_strong();
                if self.strong() == 0 {
                    // destroy the contained object
                    // (`drop_in_place` runs the destructor without freeing,
                    // since `Weak` handles may still reference the box)
                    drop_in_place(&mut (*ptr).value);

                    // remove the implicit "strong weak" pointer now that we've
                    // destroyed the contents.
                    self.dec_weak();

                    if self.weak() == 0 {
                        // No weak pointers remain either: free the allocation.
                        // The `_val` intrinsics compute size/alignment from the
                        // pointed-at value, which works for unsized `T`.
                        deallocate(ptr as *mut u8, size_of_val(&*ptr),
                                   min_align_of_val(&*ptr))
                    }
                }
            }
        }
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T> Clone for Rc<T> {
impl<T: ?Sized> Clone for Rc<T> {

/// Makes a clone of the `Rc<T>`.
///
Expand Down Expand Up @@ -613,21 +681,21 @@ impl<T: Ord> Ord for Rc<T> {

// FIXME (#18248) Make `T` `Sized?`
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: Hash> Hash for Rc<T> {
impl<T: ?Sized+Hash> Hash for Rc<T> {
    // Hash the pointed-to value, so an `Rc<T>` hashes identically to the
    // `T` it wraps (consistent with the `Deref`-based comparison impls).
    fn hash<H: Hasher>(&self, state: &mut H) {
        (**self).hash(state);
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T: fmt::Display> fmt::Display for Rc<T> {
impl<T: ?Sized+fmt::Display> fmt::Display for Rc<T> {
    // Format the contained value directly; the `Rc` wrapper is invisible
    // in `Display` output.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        fmt::Display::fmt(&**self, f)
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T: fmt::Debug> fmt::Debug for Rc<T> {
impl<T: ?Sized+fmt::Debug> fmt::Debug for Rc<T> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
fmt::Debug::fmt(&**self, f)
}
Expand All @@ -642,20 +710,20 @@ impl<T: fmt::Debug> fmt::Debug for Rc<T> {
#[unsafe_no_drop_flag]
#[unstable(feature = "alloc",
reason = "Weak pointers may not belong in this module.")]
pub struct Weak<T> {
pub struct Weak<T: ?Sized> {
    // FIXME #12808: strange names to try to avoid interfering with
    // field accesses of the contained type via Deref
    // Points at the same `RcBox` allocation as the strong `Rc` handles;
    // may be a fat pointer since `T: ?Sized`.
    _ptr: NonZero<*mut RcBox<T>>,
}

impl<T> !marker::Send for Weak<T> {}
// `Weak` shares the same non-atomic counts as `Rc`, so it cannot cross
// threads either.
impl<T: ?Sized> !marker::Send for Weak<T> {}

impl<T> !marker::Sync for Weak<T> {}
// Nor can it be shared between threads, for the same reason.
impl<T: ?Sized> !marker::Sync for Weak<T> {}


#[unstable(feature = "alloc",
reason = "Weak pointers may not belong in this module.")]
impl<T> Weak<T> {
impl<T: ?Sized> Weak<T> {

/// Upgrades a weak reference to a strong reference.
///
Expand Down Expand Up @@ -686,6 +754,7 @@ impl<T> Weak<T> {
}
}

#[cfg(stage0)] // SNAP c64d671
#[unsafe_destructor]
#[stable(feature = "rust1", since = "1.0.0")]
impl<T> Drop for Weak<T> {
Expand Down Expand Up @@ -731,9 +800,55 @@ impl<T> Drop for Weak<T> {
}
}

#[cfg(not(stage0))] // SNAP c64d671
#[unsafe_destructor]
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized> Drop for Weak<T> {
    /// Drops the `Weak<T>`.
    ///
    /// This will decrement the weak reference count.
    ///
    /// # Examples
    ///
    /// ```
    /// # #![feature(alloc)]
    /// use std::rc::Rc;
    ///
    /// {
    ///     let five = Rc::new(5);
    ///     let weak_five = five.downgrade();
    ///
    ///     // stuff
    ///
    ///     drop(weak_five); // explicit drop
    /// }
    /// {
    ///     let five = Rc::new(5);
    ///     let weak_five = five.downgrade();
    ///
    ///     // stuff
    ///
    /// } // implicit drop
    /// ```
    fn drop(&mut self) {
        unsafe {
            let ptr = *self._ptr;
            // `Weak` is `#[unsafe_no_drop_flag]`, so this destructor may run
            // on a zeroed-out value. `ptr` can be fat (`T: ?Sized`), so check
            // only its data-pointer word and bail out if it is null.
            if !(*(&ptr as *const _ as *const *const ())).is_null() {
                self.dec_weak();
                // the weak count starts at 1, and will only go to zero if all
                // the strong pointers have disappeared.
                if self.weak() == 0 {
                    // Last handle of any kind: free the allocation. The `_val`
                    // intrinsics derive size/alignment from the value itself,
                    // which works for unsized `T`.
                    deallocate(ptr as *mut u8, size_of_val(&*ptr),
                               min_align_of_val(&*ptr))
                }
            }
        }
    }
}

#[unstable(feature = "alloc",
reason = "Weak pointers may not belong in this module.")]
impl<T> Clone for Weak<T> {
impl<T: ?Sized> Clone for Weak<T> {

/// Makes a clone of the `Weak<T>`.
///
Expand All @@ -757,14 +872,14 @@ impl<T> Clone for Weak<T> {
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T: fmt::Debug> fmt::Debug for Weak<T> {
impl<T: ?Sized+fmt::Debug> fmt::Debug for Weak<T> {
    // Always prints the placeholder "(Weak)"; it does not attempt to
    // upgrade or inspect the referenced value.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "(Weak)")
    }
}

#[doc(hidden)]
trait RcBoxPtr<T> {
trait RcBoxPtr<T: ?Sized> {
fn inner(&self) -> &RcBox<T>;

#[inline]
Expand All @@ -786,29 +901,29 @@ trait RcBoxPtr<T> {
fn dec_weak(&self) { self.inner().weak.set(self.weak() - 1); }
}

impl<T> RcBoxPtr<T> for Rc<T> {
impl<T: ?Sized> RcBoxPtr<T> for Rc<T> {
    #[inline(always)]
    fn inner(&self) -> &RcBox<T> {
        unsafe {
            // Safe to assume this here, as if it weren't true, we'd be breaking
            // the contract anyway.
            // This allows the null check to be elided in the destructor if we
            // manipulated the reference count in the same function.
            // NOTE(review): the next two `assume` calls are the before/after
            // forms of the same hint left interleaved by the diff view; only
            // the second — which tests the data-pointer word of the possibly
            // fat pointer — should survive. Confirm against the merged file.
            assume(!self._ptr.is_null());
            assume(!(*(&self._ptr as *const _ as *const *const ())).is_null());
            &(**self._ptr)
        }
    }
}

impl<T> RcBoxPtr<T> for Weak<T> {
impl<T: ?Sized> RcBoxPtr<T> for Weak<T> {
#[inline(always)]
fn inner(&self) -> &RcBox<T> {
unsafe {
// Safe to assume this here, as if it weren't true, we'd be breaking
// the contract anyway.
// This allows the null check to be elided in the destructor if we
// manipulated the reference count in the same function.
assume(!self._ptr.is_null());
assume(!(*(&self._ptr as *const _ as *const *const ())).is_null());
&(**self._ptr)
}
}
Expand Down
9 changes: 8 additions & 1 deletion src/libcore/intrinsics.rs
Original file line number Diff line number Diff line change
Expand Up @@ -182,8 +182,15 @@ extern "rust-intrinsic" {
pub fn min_align_of<T>() -> usize;
pub fn pref_align_of<T>() -> usize;

/// Size in bytes of the value behind the reference; unlike `size_of`,
/// this accepts unsized `T` (size is computed from the value itself).
#[cfg(not(stage0))] // SNAP c64d671
pub fn size_of_val<T: ?Sized>(_: &T) -> usize;
/// Minimum alignment of the value behind the reference; the
/// dynamically-sized counterpart of `min_align_of`.
#[cfg(not(stage0))] // SNAP c64d671
pub fn min_align_of_val<T: ?Sized>(_: &T) -> usize;
/// Runs the destructor of the pointed-at value in place without freeing
/// the underlying memory (see its use in `Rc`'s destructor).
#[cfg(not(stage0))] // SNAP c64d671
pub fn drop_in_place<T: ?Sized>(_: *mut T);

/// Gets a static string slice containing the name of a type.
#[cfg(not(stage0))]
#[cfg(not(stage0))] // SNAP c64d671
pub fn type_name<T: ?Sized>() -> &'static str;

/// Gets an identifier which is globally unique to the specified type. This
Expand Down
8 changes: 8 additions & 0 deletions src/libcore/marker.rs
Original file line number Diff line number Diff line change
Expand Up @@ -53,6 +53,14 @@ pub trait Sized : MarkerTrait {
// Empty.
}

/// Types that can be "unsized" to a dynamically sized type.
///
/// This is a compiler lang item (`#[lang="unsize"]`); it serves as the
/// bound on the `CoerceUnsized` impls (e.g. for `Box` and `Rc`) that
/// enable DST coercions. The `PhantomFn` supertrait only relates the
/// `Self`/`T` type parameters; the trait itself has no methods.
#[unstable(feature = "core")]
#[cfg(not(stage0))] // SNAP c64d671
#[lang="unsize"]
pub trait Unsize<T> : PhantomFn<Self, T> {
    // Empty.
}

/// Types that can be copied by simply copying bits (i.e. `memcpy`).
///
/// By default, variable bindings have 'move semantics.' In other
Expand Down
Loading