Rollup of 6 pull requests #129174

Closed
wants to merge 11 commits into from
64 changes: 27 additions & 37 deletions compiler/rustc_middle/src/hir/map/mod.rs
@@ -554,53 +554,43 @@ impl<'hir> Map<'hir> {
/// }
/// ```
pub fn get_fn_id_for_return_block(self, id: HirId) -> Option<HirId> {
let mut iter = self.parent_iter(id).peekable();
let mut ignore_tail = false;
if let Node::Expr(Expr { kind: ExprKind::Ret(_), .. }) = self.tcx.hir_node(id) {
// When dealing with `return` statements, we don't care about climbing only tail
// expressions.
ignore_tail = true;
}
let enclosing_body_owner = self.tcx.local_def_id_to_hir_id(self.enclosing_body_owner(id));

// Return `None` if the `id` expression is not the returned value of the enclosing body
let mut iter = [id].into_iter().chain(self.parent_id_iter(id)).peekable();
while let Some(cur_id) = iter.next() {
if enclosing_body_owner == cur_id {
break;
}

// A return statement is always the value returned from the enclosing body regardless of
// what the parent expressions are.
if let Node::Expr(Expr { kind: ExprKind::Ret(_), .. }) = self.tcx.hir_node(cur_id) {
break;
}

let mut prev_hir_id = None;
while let Some((hir_id, node)) = iter.next() {
if let (Some((_, next_node)), false) = (iter.peek(), ignore_tail) {
match next_node {
Node::Block(Block { expr: None, .. }) => return None,
// The current node is not the tail expression of its parent.
Node::Block(Block { expr: Some(e), .. }) if hir_id != e.hir_id => return None,
// If the current expression's value doesn't get used as the parent expression's value then return `None`
if let Some(&parent_id) = iter.peek() {
match self.tcx.hir_node(parent_id) {
// The current node is not the tail expression of its parent block expression.
Node::Block(Block { expr: Some(e), .. }) if cur_id != e.hir_id => return None,
Node::Block(Block { expr: Some(e), .. })
if matches!(e.kind, ExprKind::If(_, _, None)) =>
{
return None;
}

// The current expression's value does not pass up through these parent expressions
Node::Block(Block { expr: None, .. })
| Node::Expr(Expr { kind: ExprKind::Loop(..), .. })
| Node::LetStmt(..) => return None,

_ => {}
}
}
match node {
Node::Item(_)
| Node::ForeignItem(_)
| Node::TraitItem(_)
| Node::Expr(Expr { kind: ExprKind::Closure(_), .. })
| Node::ImplItem(_)
// The input node `id` must be enclosed in the method's body as opposed
// to some other place such as its return type (fixes #114918).
// We verify that indirectly by checking that the previous node is the
// current node's body
if node.body_id().map(|b| b.hir_id) == prev_hir_id => {
return Some(hir_id)
}
// Ignore `return`s on the first iteration
Node::Expr(Expr { kind: ExprKind::Loop(..) | ExprKind::Ret(..), .. })
| Node::LetStmt(_) => {
return None;
}
_ => {}
}

prev_hir_id = Some(hir_id);
}
None

Some(enclosing_body_owner)
}

/// Retrieves the `OwnerId` for `id`'s parent item, or `id` itself if no
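To make the tail-expression distinction above concrete, here is a small hypothetical snippet (not part of this diff): in `f` the literal is the body's tail expression, so the rewritten walk reaches the enclosing body owner, while in `g` the literal inside the inner block statement never becomes the body's value, which is the case the `Node::Block` arms bail out on with `None`.

fn f() -> i32 {
    1 // tail expression: its value is what the body returns
}

fn g() -> i32 {
    { 1; } // inner block used as a statement: this `1` is discarded
    2
}

fn main() {
    assert_eq!(f(), 1);
    assert_eq!(g(), 2);
}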
18 changes: 9 additions & 9 deletions compiler/rustc_middle/src/mir/pretty.rs
@@ -1418,21 +1418,19 @@ pub fn write_allocations<'tcx>(
alloc.inner().provenance().ptrs().values().map(|p| p.alloc_id())
}

fn alloc_ids_from_const_val(val: ConstValue<'_>) -> impl Iterator<Item = AllocId> + '_ {
fn alloc_id_from_const_val(val: ConstValue<'_>) -> Option<AllocId> {
match val {
ConstValue::Scalar(interpret::Scalar::Ptr(ptr, _)) => {
Either::Left(std::iter::once(ptr.provenance.alloc_id()))
}
ConstValue::Scalar(interpret::Scalar::Int { .. }) => Either::Right(std::iter::empty()),
ConstValue::ZeroSized => Either::Right(std::iter::empty()),
ConstValue::Scalar(interpret::Scalar::Ptr(ptr, _)) => Some(ptr.provenance.alloc_id()),
ConstValue::Scalar(interpret::Scalar::Int { .. }) => None,
ConstValue::ZeroSized => None,
ConstValue::Slice { .. } => {
// `u8`/`str` slices shouldn't contain pointers that we want to print.
Either::Right(std::iter::empty())
None
}
ConstValue::Indirect { alloc_id, .. } => {
// FIXME: we don't actually want to print all of these, since some are printed nicely directly as values inline in MIR.
// Really we'd want `pretty_print_const_value` to decide which allocations to print, instead of having a separate visitor.
Either::Left(std::iter::once(alloc_id))
Some(alloc_id)
}
}
}
@@ -1443,7 +1441,9 @@ pub fn write_allocations<'tcx>(
match c.const_ {
Const::Ty(_, _) | Const::Unevaluated(..) => {}
Const::Val(val, _) => {
self.0.extend(alloc_ids_from_const_val(val));
if let Some(id) = alloc_id_from_const_val(val) {
self.0.insert(id);
}
}
}
}
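The change above swaps an `Either`-of-iterators return type for an `Option`, since the helper yields at most one `AllocId`. A minimal standalone sketch of that pattern, with hypothetical names rather than rustc internals:

use std::collections::BTreeSet;

#[derive(Clone, Copy)]
enum Val {
    Ptr(u64), // stands in for ConstValue::Scalar(Scalar::Ptr(..))
    Int(i64), // stands in for ConstValue::Scalar(Scalar::Int(..))
}

// At most one id per value, so `Option` replaces a zero-or-one-item iterator.
fn alloc_id_from_val(val: Val) -> Option<u64> {
    match val {
        Val::Ptr(id) => Some(id),
        Val::Int(_) => None,
    }
}

fn main() {
    let mut seen: BTreeSet<u64> = BTreeSet::new();
    for v in [Val::Ptr(3), Val::Int(7), Val::Ptr(3)] {
        // The caller switches from `extend(iter)` to a single `insert`.
        if let Some(id) = alloc_id_from_val(v) {
            seen.insert(id);
        }
    }
    assert_eq!(seen.into_iter().collect::<Vec<_>>(), vec![3]);
}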
127 changes: 22 additions & 105 deletions library/core/src/clone.rs
@@ -36,8 +36,7 @@

#![stable(feature = "rust1", since = "1.0.0")]

use crate::mem::{self, MaybeUninit};
use crate::ptr;
mod uninit;

/// A common trait for the ability to explicitly duplicate an object.
///
@@ -248,7 +247,7 @@ pub unsafe trait CloneToUninit {
/// * `dst` must be properly aligned.
/// * `dst` must have the same [pointer metadata] (slice length or `dyn` vtable) as `self`.
///
/// [valid]: ptr#safety
/// [valid]: crate::ptr#safety
/// [pointer metadata]: crate::ptr::metadata()
///
/// # Panics
@@ -272,124 +271,42 @@ pub unsafe trait CloneToUninit {

#[unstable(feature = "clone_to_uninit", issue = "126799")]
unsafe impl<T: Clone> CloneToUninit for T {
default unsafe fn clone_to_uninit(&self, dst: *mut Self) {
// SAFETY: The safety conditions of clone_to_uninit() are a superset of those of
// ptr::write().
unsafe {
// We hope the optimizer will figure out to create the cloned value in-place,
// skipping ever storing it on the stack and the copy to the destination.
ptr::write(dst, self.clone());
}
}
}

// Specialized implementation for types that are [`Copy`], not just [`Clone`],
// and can therefore be copied bitwise.
#[unstable(feature = "clone_to_uninit", issue = "126799")]
unsafe impl<T: Copy> CloneToUninit for T {
#[inline]
unsafe fn clone_to_uninit(&self, dst: *mut Self) {
// SAFETY: The safety conditions of clone_to_uninit() are a superset of those of
// ptr::copy_nonoverlapping().
unsafe {
ptr::copy_nonoverlapping(self, dst, 1);
}
// SAFETY: we're calling a specialization with the same contract
unsafe { <T as self::uninit::CopySpec>::clone_one(self, dst) }
}
}

#[unstable(feature = "clone_to_uninit", issue = "126799")]
unsafe impl<T: Clone> CloneToUninit for [T] {
#[inline]
#[cfg_attr(debug_assertions, track_caller)]
default unsafe fn clone_to_uninit(&self, dst: *mut Self) {
let len = self.len();
// This is the most likely mistake to make, so check it as a debug assertion.
debug_assert_eq!(
len,
dst.len(),
"clone_to_uninit() source and destination must have equal lengths",
);

// SAFETY: The produced `&mut` is valid because:
// * The caller is obligated to provide a pointer which is valid for writes.
// * All bytes pointed to are in MaybeUninit, so we don't care about the memory's
// initialization status.
let uninit_ref = unsafe { &mut *(dst as *mut [MaybeUninit<T>]) };

// Copy the elements
let mut initializing = InitializingSlice::from_fully_uninit(uninit_ref);
for element_ref in self.iter() {
// If the clone() panics, `initializing` will take care of the cleanup.
initializing.push(element_ref.clone());
}
// If we reach here, then the entire slice is initialized, and we've satisfied our
// responsibilities to the caller. Disarm the cleanup guard by forgetting it.
mem::forget(initializing);
unsafe fn clone_to_uninit(&self, dst: *mut Self) {
// SAFETY: we're calling a specialization with the same contract
unsafe { <T as self::uninit::CopySpec>::clone_slice(self, dst) }
}
}

#[unstable(feature = "clone_to_uninit", issue = "126799")]
unsafe impl<T: Copy> CloneToUninit for [T] {
unsafe impl CloneToUninit for str {
#[inline]
#[cfg_attr(debug_assertions, track_caller)]
unsafe fn clone_to_uninit(&self, dst: *mut Self) {
let len = self.len();
// This is the most likely mistake to make, so check it as a debug assertion.
debug_assert_eq!(
len,
dst.len(),
"clone_to_uninit() source and destination must have equal lengths",
);

// SAFETY: The safety conditions of clone_to_uninit() are a superset of those of
// ptr::copy_nonoverlapping().
unsafe {
ptr::copy_nonoverlapping(self.as_ptr(), dst.as_mut_ptr(), len);
}
// SAFETY: str is just a [u8] with UTF-8 invariant
unsafe { self.as_bytes().clone_to_uninit(dst as *mut [u8]) }
}
}

/// Ownership of a collection of values stored in a non-owned `[MaybeUninit<T>]`, some of which
/// are not yet initialized. This is sort of like a `Vec` that doesn't own its allocation.
/// Its responsibility is to provide cleanup on unwind by dropping the values that *are*
/// initialized, unless disarmed by forgetting.
///
/// This is a helper for `impl<T: Clone> CloneToUninit for [T]`.
struct InitializingSlice<'a, T> {
data: &'a mut [MaybeUninit<T>],
/// Number of elements of `*self.data` that are initialized.
initialized_len: usize,
}

impl<'a, T> InitializingSlice<'a, T> {
#[inline]
fn from_fully_uninit(data: &'a mut [MaybeUninit<T>]) -> Self {
Self { data, initialized_len: 0 }
}

/// Push a value onto the end of the initialized part of the slice.
///
/// # Panics
///
/// Panics if the slice is already fully initialized.
#[inline]
fn push(&mut self, value: T) {
MaybeUninit::write(&mut self.data[self.initialized_len], value);
self.initialized_len += 1;
}
}

impl<'a, T> Drop for InitializingSlice<'a, T> {
#[cold] // will only be invoked on unwind
fn drop(&mut self) {
let initialized_slice = ptr::slice_from_raw_parts_mut(
MaybeUninit::slice_as_mut_ptr(self.data),
self.initialized_len,
);
// SAFETY:
// * the pointer is valid because it was made from a mutable reference
// * `initialized_len` counts the initialized elements as an invariant of this type,
// so each of the pointed-to elements is initialized and may be dropped.
unsafe {
ptr::drop_in_place::<[T]>(initialized_slice);
}
#[unstable(feature = "clone_to_uninit", issue = "126799")]
unsafe impl CloneToUninit for crate::ffi::CStr {
#[cfg_attr(debug_assertions, track_caller)]
unsafe fn clone_to_uninit(&self, dst: *mut Self) {
// SAFETY: For now, CStr is just a #[repr(transparent)] [c_char] with some invariants.
// And we can cast [c_char] to [u8] on all supported platforms (see: to_bytes_with_nul).
// The pointer metadata properly preserves the length (NUL included).
// See: `cstr_metadata_is_length_with_nul` in tests.
unsafe { self.to_bytes_with_nul().clone_to_uninit(dst as *mut [u8]) }
}
}
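
As a rough usage sketch of the unstable trait reworked here (nightly-only; assumes `#![feature(clone_to_uninit)]` and the `dst: *mut Self` signature shown in this diff), the `str` impl can be exercised by cloning into caller-provided uninitialized storage that carries the same length metadata:

#![feature(clone_to_uninit)]

use std::clone::CloneToUninit;
use std::mem::MaybeUninit;

fn main() {
    let src: &str = "hello";
    let mut buf: [MaybeUninit<u8>; 5] = [MaybeUninit::uninit(); 5];
    // Build a `*mut str` with the same length metadata as `src`.
    let bytes: *mut [u8] =
        std::ptr::slice_from_raw_parts_mut(buf.as_mut_ptr().cast::<u8>(), src.len());
    let dst = bytes as *mut str;
    // SAFETY: `dst` is valid for writes of `src.len()` bytes, properly aligned,
    // and has the same pointer metadata (length) as `src`.
    unsafe { src.clone_to_uninit(dst) };
    // SAFETY: every byte of `buf` was just initialized by `clone_to_uninit`,
    // and the source was valid UTF-8.
    let copied = unsafe {
        std::str::from_utf8_unchecked(std::slice::from_raw_parts(buf.as_ptr().cast::<u8>(), src.len()))
    };
    assert_eq!(copied, "hello");
}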
