Rollup of 9 pull requests #61303

Closed
wants to merge 34 commits

Changes from all commits
34 commits
a35cdd4
Implement `iter::Sum` and `iter::Product` for `Option`
jtdowney Mar 6, 2019
8c69876
Update stable attribute to be since 1.35.0
Centril Mar 19, 2019
422a4c0
Add improved doc example for Sum<Option<T>>
jtdowney Mar 21, 2019
4d9b9e9
wip nth_back on chunks
Apr 26, 2019
73ca8bc
hopefully working nth_back on chunks
May 1, 2019
5eb0e08
Implement nth_back for RChunks(Exact)(Mut)
timvermeulen May 5, 2019
02a148d
wip nth_back on chunks
Apr 26, 2019
aff83c8
hopefully working nth_back on chunks
May 1, 2019
16223e4
new implementation for nth_back for chunks
wizAmit May 14, 2019
fd15405
fixed merge conflicts
wizAmit May 14, 2019
dc82626
wip nth_back on chunks
Apr 26, 2019
2080b86
hopefully working nth_back on chunks
May 1, 2019
29a103d
wip nth_back on chunks
Apr 26, 2019
9309447
succint implementation
wizAmit May 22, 2019
bcfd1f3
fix merge conflicts
wizAmit May 22, 2019
f1d0829
Add Step::sub_usize
timvermeulen May 4, 2019
f9d328d
sync::Weak::{as,from,into}_raw
vorner May 12, 2019
4f1dcb3
rc::Weak::{as,from,into}_raw
vorner May 19, 2019
96e3fb2
librustc_errors: Move annotation collection to own impl
phansch May 25, 2019
0631d19
Avoid unneeded bug!() call
spastorino May 28, 2019
120ce12
Rename `TraitOrImpl` to `Assoc` and `trait_or_impl` to `assoc`.
eddyb May 28, 2019
03d3290
rustc_codegen_ssa: remove obsolete codegen stats.
eddyb May 28, 2019
29b7c06
rustc_codegen_llvm: remove LLVM instruction count stats.
eddyb May 28, 2019
7fa97c0
rustc_codegen_llvm: rename away the last occurrence of `insn`.
eddyb May 28, 2019
9f8d934
Bump Sum and Product for Option<T> to 1.37
dtolnay May 28, 2019
8e3f003
Rollup merge of #58975 - jtdowney:iter_arith_traits_option, r=dtolnay
Centril May 29, 2019
85c975b
Rollup merge of #60542 - timvermeulen:step_sub_usize, r=scottmcm
Centril May 29, 2019
966e354
Rollup merge of #60555 - timvermeulen:rchunks_nth_back, r=scottmcm
Centril May 29, 2019
c98841b
Rollup merge of #60766 - vorner:weak-into-raw, r=sfackler
Centril May 29, 2019
aad084a
Rollup merge of #61048 - wizAmit:feature/nth_back_chunks, r=scottmcm
Centril May 29, 2019
cd38c8f
Rollup merge of #61191 - phansch:annotate_snippet_refactorings1, r=es…
Centril May 29, 2019
e0bb093
Rollup merge of #61291 - spastorino:avoid-unneeded-bug-call, r=estebank
Centril May 29, 2019
2af35bf
Rollup merge of #61294 - eddyb:no-trait-nor-impl, r=varkor
Centril May 29, 2019
c0142ac
Rollup merge of #61297 - eddyb:forsaken-stats, r=nagisa
Centril May 29, 2019
162 changes: 156 additions & 6 deletions src/liballoc/rc.rs
@@ -239,7 +239,7 @@ use core::fmt;
use core::hash::{Hash, Hasher};
use core::intrinsics::abort;
use core::marker::{self, Unpin, Unsize, PhantomData};
use core::mem::{self, align_of_val, forget, size_of_val};
use core::mem::{self, align_of, align_of_val, forget, size_of_val};
use core::ops::{Deref, Receiver, CoerceUnsized, DispatchFromDyn};
use core::pin::Pin;
use core::ptr::{self, NonNull};
@@ -416,11 +416,7 @@ impl<T: ?Sized> Rc<T> {
/// ```
#[stable(feature = "rc_raw", since = "1.17.0")]
pub unsafe fn from_raw(ptr: *const T) -> Self {
// Align the unsized value to the end of the RcBox.
// Because it is ?Sized, it will always be the last field in memory.
let align = align_of_val(&*ptr);
let layout = Layout::new::<RcBox<()>>();
let offset = (layout.size() + layout.padding_needed_for(align)) as isize;
let offset = data_offset(ptr);

// Reverse the offset to find the original RcBox.
let fake_ptr = ptr as *mut RcBox<T>;
@@ -1262,6 +1258,143 @@ impl<T> Weak<T> {
ptr: NonNull::new(usize::MAX as *mut RcBox<T>).expect("MAX is not 0"),
}
}

/// Returns a raw pointer to the object `T` pointed to by this `Weak<T>`.
///
/// It is up to the caller to ensure that the object is still alive when accessing it through
/// the pointer.
///
/// The pointer may be [`null`] or dangling if the object has already been destroyed.
///
/// # Examples
///
/// ```
/// #![feature(weak_into_raw)]
///
/// use std::rc::{Rc, Weak};
/// use std::ptr;
///
/// let strong = Rc::new(42);
/// let weak = Rc::downgrade(&strong);
/// // Both point to the same object
/// assert!(ptr::eq(&*strong, Weak::as_raw(&weak)));
/// // The strong here keeps it alive, so we can still access the object.
/// assert_eq!(42, unsafe { *Weak::as_raw(&weak) });
///
/// drop(strong);
/// // But not any more. We can do Weak::as_raw(&weak), but accessing the pointer would lead to
/// // undefined behaviour.
/// // assert_eq!(42, unsafe { *Weak::as_raw(&weak) });
/// ```
///
/// [`null`]: ../../std/ptr/fn.null.html
#[unstable(feature = "weak_into_raw", issue = "60728")]
pub fn as_raw(this: &Self) -> *const T {
match this.inner() {
None => ptr::null(),
Some(inner) => {
let offset = data_offset_sized::<T>();
let ptr = inner as *const RcBox<T>;
// Note: while the pointer we create may already point to a dropped value, the
// allocation still lives (it must be kept alive by the weak count as long as we are alive).
// Therefore, applying the offset is fine; it stays within the allocation.
let ptr = unsafe { (ptr as *const u8).offset(offset) };
ptr as *const T
}
}
}

/// Consumes the `Weak<T>` and turns it into a raw pointer.
///
/// This converts the weak pointer into a raw pointer, preserving the original weak count. It
/// can be turned back into the `Weak<T>` with [`from_raw`].
///
/// The same restrictions on accessing the target of the pointer as with
/// [`as_raw`] apply.
///
/// # Examples
///
/// ```
/// #![feature(weak_into_raw)]
///
/// use std::rc::{Rc, Weak};
///
/// let strong = Rc::new(42);
/// let weak = Rc::downgrade(&strong);
/// let raw = Weak::into_raw(weak);
///
/// assert_eq!(1, Rc::weak_count(&strong));
/// assert_eq!(42, unsafe { *raw });
///
/// drop(unsafe { Weak::from_raw(raw) });
/// assert_eq!(0, Rc::weak_count(&strong));
/// ```
///
/// [`from_raw`]: struct.Weak.html#method.from_raw
/// [`as_raw`]: struct.Weak.html#method.as_raw
#[unstable(feature = "weak_into_raw", issue = "60728")]
pub fn into_raw(this: Self) -> *const T {
let result = Self::as_raw(&this);
mem::forget(this);
result
}

/// Converts a raw pointer previously created by [`into_raw`] back into `Weak<T>`.
///
/// This can be used to safely get a strong reference (by calling [`upgrade`]
/// later) or to deallocate the weak count by dropping the `Weak<T>`.
///
/// It takes ownership of one weak count. If [`null`] is passed, a dangling [`Weak`] is
/// returned.
///
/// # Safety
///
/// The pointer must represent one valid weak count. In other words, it must point to a `T` that
/// is or *was* managed by an [`Rc`], and the weak count of that [`Rc`] must not have reached
/// 0. It is allowed for the strong count to be 0.
///
/// # Examples
///
/// ```
/// #![feature(weak_into_raw)]
///
/// use std::rc::{Rc, Weak};
///
/// let strong = Rc::new(42);
///
/// let raw_1 = Weak::into_raw(Rc::downgrade(&strong));
/// let raw_2 = Weak::into_raw(Rc::downgrade(&strong));
///
/// assert_eq!(2, Rc::weak_count(&strong));
///
/// assert_eq!(42, *Weak::upgrade(&unsafe { Weak::from_raw(raw_1) }).unwrap());
/// assert_eq!(1, Rc::weak_count(&strong));
///
/// drop(strong);
///
/// // Decrement the last weak count.
/// assert!(Weak::upgrade(&unsafe { Weak::from_raw(raw_2) }).is_none());
/// ```
///
/// [`null`]: ../../std/ptr/fn.null.html
/// [`into_raw`]: struct.Weak.html#method.into_raw
/// [`upgrade`]: struct.Weak.html#method.upgrade
/// [`Rc`]: struct.Rc.html
/// [`Weak`]: struct.Weak.html
#[unstable(feature = "weak_into_raw", issue = "60728")]
pub unsafe fn from_raw(ptr: *const T) -> Self {
if ptr.is_null() {
Self::new()
} else {
// See Rc::from_raw for details
let offset = data_offset(ptr);
let fake_ptr = ptr as *mut RcBox<T>;
let ptr = set_data_ptr(fake_ptr, (ptr as *mut u8).offset(-offset));
Weak {
ptr: NonNull::new(ptr).expect("Invalid pointer passed to from_raw"),
}
}
}
}

pub(crate) fn is_dangling<T: ?Sized>(ptr: NonNull<T>) -> bool {
@@ -2007,3 +2140,20 @@ impl<T: ?Sized> AsRef<T> for Rc<T> {

#[stable(feature = "pin", since = "1.33.0")]
impl<T: ?Sized> Unpin for Rc<T> { }

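/// Computes the offset of the data field within `RcBox`.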
unsafe fn data_offset<T: ?Sized>(ptr: *const T) -> isize {
// Align the unsized value to the end of the RcBox.
// Because it is ?Sized, it will always be the last field in memory.
let align = align_of_val(&*ptr);
let layout = Layout::new::<RcBox<()>>();
(layout.size() + layout.padding_needed_for(align)) as isize
}

/// Computes the offset of the data field within `RcBox`.
///
/// Unlike [`data_offset`], this doesn't need the pointer, but it works only on `T: Sized`.
fn data_offset_sized<T>() -> isize {
let align = align_of::<T>();
let layout = Layout::new::<RcBox<()>>();
(layout.size() + layout.padding_needed_for(align)) as isize
}
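
A minimal standalone sketch of the offset arithmetic used by `data_offset` above may help make the calculation concrete. `RcBoxHeader` below is a hypothetical stand-in for the two reference counters at the start of the box (not the real `RcBox` definition), and the padding is computed by hand because `Layout::padding_needed_for` is an unstable API:

```
use std::alloc::Layout;

// Stand-in for the header of an `RcBox`: a strong and a weak counter.
struct RcBoxHeader {
    _strong: usize,
    _weak: usize,
}

// Offset of a value with the given alignment when placed directly after the header.
// `align` is always a power of two for Rust types.
fn offset_for_align(align: usize) -> usize {
    let header = Layout::new::<RcBoxHeader>();
    // Same value as the unstable `Layout::padding_needed_for(align)`:
    // bytes needed to round the header size up to a multiple of `align`.
    let padding = header.size().wrapping_neg() & (align - 1);
    header.size() + padding
}

fn main() {
    // On a typical 64-bit target the header is 16 bytes, so values with
    // alignment up to 16 start right after it, while a 32-byte-aligned
    // value is pushed out to offset 32.
    println!("align 1  -> offset {}", offset_for_align(1)); // 16
    println!("align 8  -> offset {}", offset_for_align(8)); // 16
    println!("align 32 -> offset {}", offset_for_align(32)); // 32
}
```
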
164 changes: 158 additions & 6 deletions src/liballoc/sync.rs
@@ -13,7 +13,7 @@ use core::borrow;
use core::fmt;
use core::cmp::{self, Ordering};
use core::intrinsics::abort;
use core::mem::{self, align_of_val, size_of_val};
use core::mem::{self, align_of, align_of_val, size_of_val};
use core::ops::{Deref, Receiver, CoerceUnsized, DispatchFromDyn};
use core::pin::Pin;
use core::ptr::{self, NonNull};
@@ -397,11 +397,7 @@ impl<T: ?Sized> Arc<T> {
/// ```
#[stable(feature = "rc_raw", since = "1.17.0")]
pub unsafe fn from_raw(ptr: *const T) -> Self {
// Align the unsized value to the end of the ArcInner.
// Because it is ?Sized, it will always be the last field in memory.
let align = align_of_val(&*ptr);
let layout = Layout::new::<ArcInner<()>>();
let offset = (layout.size() + layout.padding_needed_for(align)) as isize;
let offset = data_offset(ptr);

// Reverse the offset to find the original ArcInner.
let fake_ptr = ptr as *mut ArcInner<T>;
@@ -1071,6 +1067,144 @@ impl<T> Weak<T> {
ptr: NonNull::new(usize::MAX as *mut ArcInner<T>).expect("MAX is not 0"),
}
}

/// Returns a raw pointer to the object `T` pointed to by this `Weak<T>`.
///
/// It is up to the caller to ensure that the object is still alive when accessing it through
/// the pointer.
///
/// The pointer may be [`null`] or dangling if the object has already been destroyed.
///
/// # Examples
///
/// ```
/// #![feature(weak_into_raw)]
///
/// use std::sync::{Arc, Weak};
/// use std::ptr;
///
/// let strong = Arc::new(42);
/// let weak = Arc::downgrade(&strong);
/// // Both point to the same object
/// assert!(ptr::eq(&*strong, Weak::as_raw(&weak)));
/// // The strong here keeps it alive, so we can still access the object.
/// assert_eq!(42, unsafe { *Weak::as_raw(&weak) });
///
/// drop(strong);
/// // But not any more. We can do Weak::as_raw(&weak), but accessing the pointer would lead to
/// // undefined behaviour.
/// // assert_eq!(42, unsafe { *Weak::as_raw(&weak) });
/// ```
///
/// [`null`]: ../../std/ptr/fn.null.html
#[unstable(feature = "weak_into_raw", issue = "60728")]
pub fn as_raw(this: &Self) -> *const T {
match this.inner() {
None => ptr::null(),
Some(inner) => {
let offset = data_offset_sized::<T>();
let ptr = inner as *const ArcInner<T>;
// Note: while the pointer we create may already point to a dropped value, the
// allocation still lives (it must be kept alive by the weak count as long as we are alive).
// Therefore, applying the offset is fine; it stays within the allocation.
let ptr = unsafe { (ptr as *const u8).offset(offset) };
ptr as *const T
}
}
}

/// Consumes the `Weak<T>` and turns it into a raw pointer.
///
/// This converts the weak pointer into a raw pointer, preserving the original weak count. It
/// can be turned back into the `Weak<T>` with [`from_raw`].
///
/// The same restrictions on accessing the target of the pointer as with
/// [`as_raw`] apply.
///
/// # Examples
///
/// ```
/// #![feature(weak_into_raw)]
///
/// use std::sync::{Arc, Weak};
///
/// let strong = Arc::new(42);
/// let weak = Arc::downgrade(&strong);
/// let raw = Weak::into_raw(weak);
///
/// assert_eq!(1, Arc::weak_count(&strong));
/// assert_eq!(42, unsafe { *raw });
///
/// drop(unsafe { Weak::from_raw(raw) });
/// assert_eq!(0, Arc::weak_count(&strong));
/// ```
///
/// [`from_raw`]: struct.Weak.html#method.from_raw
/// [`as_raw`]: struct.Weak.html#method.as_raw
#[unstable(feature = "weak_into_raw", issue = "60728")]
pub fn into_raw(this: Self) -> *const T {
let result = Self::as_raw(&this);
mem::forget(this);
result
}

/// Converts a raw pointer previously created by [`into_raw`] back into
/// `Weak<T>`.
///
/// This can be used to safely get a strong reference (by calling [`upgrade`]
/// later) or to deallocate the weak count by dropping the `Weak<T>`.
///
/// It takes ownership of one weak count. If [`null`] is passed, a dangling [`Weak`] is
/// returned.
///
/// # Safety
///
/// The pointer must represent one valid weak count. In other words, it must point to a `T` that
/// is or *was* managed by an [`Arc`], and the weak count of that [`Arc`] must not have reached
/// 0. It is allowed for the strong count to be 0.
///
/// # Examples
///
/// ```
/// #![feature(weak_into_raw)]
///
/// use std::sync::{Arc, Weak};
///
/// let strong = Arc::new(42);
///
/// let raw_1 = Weak::into_raw(Arc::downgrade(&strong));
/// let raw_2 = Weak::into_raw(Arc::downgrade(&strong));
///
/// assert_eq!(2, Arc::weak_count(&strong));
///
/// assert_eq!(42, *Weak::upgrade(&unsafe { Weak::from_raw(raw_1) }).unwrap());
/// assert_eq!(1, Arc::weak_count(&strong));
///
/// drop(strong);
///
/// // Decrement the last weak count.
/// assert!(Weak::upgrade(&unsafe { Weak::from_raw(raw_2) }).is_none());
/// ```
///
/// [`null`]: ../../std/ptr/fn.null.html
/// [`into_raw`]: struct.Weak.html#method.into_raw
/// [`upgrade`]: struct.Weak.html#method.upgrade
/// [`Weak`]: struct.Weak.html
/// [`Arc`]: struct.Arc.html
#[unstable(feature = "weak_into_raw", issue = "60728")]
pub unsafe fn from_raw(ptr: *const T) -> Self {
if ptr.is_null() {
Self::new()
} else {
// See Arc::from_raw for details
let offset = data_offset(ptr);
let fake_ptr = ptr as *mut ArcInner<T>;
let ptr = set_data_ptr(fake_ptr, (ptr as *mut u8).offset(-offset));
Weak {
ptr: NonNull::new(ptr).expect("Invalid pointer passed to from_raw"),
}
}
}
}

impl<T: ?Sized> Weak<T> {
@@ -2150,3 +2284,21 @@ impl<T: ?Sized> AsRef<T> for Arc<T> {

#[stable(feature = "pin", since = "1.33.0")]
impl<T: ?Sized> Unpin for Arc<T> { }

/// Computes the offset of the data field within ArcInner.
unsafe fn data_offset<T: ?Sized>(ptr: *const T) -> isize {
// Align the unsized value to the end of the ArcInner.
// Because it is ?Sized, it will always be the last field in memory.
let align = align_of_val(&*ptr);
let layout = Layout::new::<ArcInner<()>>();
(layout.size() + layout.padding_needed_for(align)) as isize
}

/// Computes the offset of the data field within ArcInner.
///
/// Unlike [`data_offset`], this doesn't need the pointer, but it works only on `T: Sized`.
fn data_offset_sized<T>() -> isize {
let align = align_of::<T>();
let layout = Layout::new::<ArcInner<()>>();
(layout.size() + layout.padding_needed_for(align)) as isize
}
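
For completeness, here is a minimal round-trip sketch (not part of the diff) of the `Weak` raw-pointer API added in this rollup. At the time of this PR it requires `#![feature(weak_into_raw)]` on nightly; the `into_raw`/`from_raw` pair was stabilized later, so the same shape also works without the feature gate on newer compilers:

```
#![feature(weak_into_raw)]

use std::sync::{Arc, Weak};

fn main() {
    let strong = Arc::new(String::from("hello"));
    let weak = Arc::downgrade(&strong);

    // Converting to a raw pointer keeps the weak count; nothing is dropped here.
    let raw: *const String = Weak::into_raw(weak);
    assert_eq!(1, Arc::weak_count(&strong));

    // Reconstructing the `Weak` takes that count back over, and it can still be
    // upgraded while a strong reference exists.
    let weak = unsafe { Weak::from_raw(raw) };
    assert_eq!(Some(5), weak.upgrade().map(|s| s.len()));

    // Once the last strong reference is gone, upgrading fails.
    drop(strong);
    assert!(weak.upgrade().is_none());
}
```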