diff --git a/compiler/rustc_const_eval/src/interpret/validity.rs b/compiler/rustc_const_eval/src/interpret/validity.rs index 34296b6d8de34..514b78ead5efc 100644 --- a/compiler/rustc_const_eval/src/interpret/validity.rs +++ b/compiler/rustc_const_eval/src/interpret/validity.rs @@ -7,6 +7,7 @@ use std::borrow::Cow; use std::fmt::Write; use std::hash::Hash; +use std::mem; use std::num::NonZero; use either::{Left, Right}; @@ -288,6 +289,7 @@ struct ValidityVisitor<'rt, 'tcx, M: Machine<'tcx>> { /// If this is `Some`, then `reset_provenance_and_padding` must be true (but not vice versa: /// we might not track data vs padding bytes if the operand isn't stored in memory anyway). data_bytes: Option, + may_dangle: bool, } impl<'rt, 'tcx, M: Machine<'tcx>> ValidityVisitor<'rt, 'tcx, M> { @@ -503,27 +505,29 @@ impl<'rt, 'tcx, M: Machine<'tcx>> ValidityVisitor<'rt, 'tcx, M> { // alignment and size determined by the layout (size will be 0, // alignment should take attributes into account). .unwrap_or_else(|| (place.layout.size, place.layout.align.abi)); - // Direct call to `check_ptr_access_align` checks alignment even on CTFE machines. - try_validation!( - self.ecx.check_ptr_access( - place.ptr(), - size, - CheckInAllocMsg::Dereferenceable, // will anyway be replaced by validity message - ), - self.path, - Ub(DanglingIntPointer { addr: 0, .. }) => NullPtr { ptr_kind, maybe: false }, - Ub(DanglingIntPointer { addr: i, .. }) => DanglingPtrNoProvenance { - ptr_kind, - // FIXME this says "null pointer" when null but we need translate - pointer: format!("{}", Pointer::>::without_provenance(i)) - }, - Ub(PointerOutOfBounds { .. }) => DanglingPtrOutOfBounds { - ptr_kind - }, - Ub(PointerUseAfterFree(..)) => DanglingPtrUseAfterFree { - ptr_kind, - }, - ); + if !self.may_dangle { + // Direct call to `check_ptr_access_align` checks alignment even on CTFE machines. + try_validation!( + self.ecx.check_ptr_access( + place.ptr(), + size, + CheckInAllocMsg::Dereferenceable, // will anyway be replaced by validity message + ), + self.path, + Ub(DanglingIntPointer { addr: 0, .. }) => NullPtr { ptr_kind, maybe: false }, + Ub(DanglingIntPointer { addr: i, .. }) => DanglingPtrNoProvenance { + ptr_kind, + // FIXME this says "null pointer" when null but we need translate + pointer: format!("{}", Pointer::>::without_provenance(i)) + }, + Ub(PointerOutOfBounds { .. }) => DanglingPtrOutOfBounds { + ptr_kind + }, + Ub(PointerUseAfterFree(..)) => DanglingPtrUseAfterFree { + ptr_kind, + }, + ); + } try_validation!( self.ecx.check_ptr_align( place.ptr(), @@ -536,6 +540,7 @@ impl<'rt, 'tcx, M: Machine<'tcx>> ValidityVisitor<'rt, 'tcx, M> { found_bytes: has.bytes() }, ); + // Make sure this is non-null. We checked dereferenceability above, but if `size` is zero // that does not imply non-null. 
let scalar = Scalar::from_maybe_pointer(place.ptr(), self.ecx); @@ -1269,6 +1274,14 @@ impl<'rt, 'tcx, M: Machine<'tcx>> ValueVisitor<'tcx, M> for ValidityVisitor<'rt, ty::PatternKind::Or(_patterns) => {} } } + ty::Adt(adt, _) if adt.is_maybe_dangling() => { + let could_dangle = mem::replace(&mut self.may_dangle, true); + + let inner = self.ecx.project_field(val, FieldIdx::ZERO)?; + self.visit_value(&inner)?; + + self.may_dangle = could_dangle; + } _ => { // default handler try_validation!( @@ -1354,6 +1367,7 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> { ecx, reset_provenance_and_padding, data_bytes: reset_padding.then_some(RangeSet(Vec::new())), + may_dangle: false, }; v.visit_value(val)?; v.reset_padding(val)?; diff --git a/compiler/rustc_hir/src/lang_items.rs b/compiler/rustc_hir/src/lang_items.rs index 4ac3e4e83e80a..1cbed284bc628 100644 --- a/compiler/rustc_hir/src/lang_items.rs +++ b/compiler/rustc_hir/src/lang_items.rs @@ -353,7 +353,8 @@ language_item_table! { PhantomData, sym::phantom_data, phantom_data, Target::Struct, GenericRequirement::Exact(1); - ManuallyDrop, sym::manually_drop, manually_drop, Target::Struct, GenericRequirement::None; + ManuallyDrop, sym::manually_drop, manually_drop, Target::Struct, GenericRequirement::Exact(1); + MaybeDangling, sym::maybe_dangling, maybe_dangling, Target::Struct, GenericRequirement::Exact(1); BikeshedGuaranteedNoDrop, sym::bikeshed_guaranteed_no_drop, bikeshed_guaranteed_no_drop, Target::Trait, GenericRequirement::Exact(0); MaybeUninit, sym::maybe_uninit, maybe_uninit, Target::Union, GenericRequirement::None; diff --git a/compiler/rustc_middle/src/ty/adt.rs b/compiler/rustc_middle/src/ty/adt.rs index 510c546f82a4e..c4d841e77508a 100644 --- a/compiler/rustc_middle/src/ty/adt.rs +++ b/compiler/rustc_middle/src/ty/adt.rs @@ -59,6 +59,8 @@ bitflags::bitflags! { const IS_PIN = 1 << 11; /// Indicates whether the type is `#[pin_project]`. const IS_PIN_PROJECT = 1 << 12; + /// Indicates whether the type is `MaybeDangling<_>`. + const IS_MAYBE_DANGLING = 1 << 13; } } rustc_data_structures::external_bitflags_debug! { AdtFlags } @@ -315,6 +317,9 @@ impl AdtDefData { if tcx.is_lang_item(did, LangItem::ManuallyDrop) { flags |= AdtFlags::IS_MANUALLY_DROP; } + if tcx.is_lang_item(did, LangItem::MaybeDangling) { + flags |= AdtFlags::IS_MAYBE_DANGLING; + } if tcx.is_lang_item(did, LangItem::UnsafeCell) { flags |= AdtFlags::IS_UNSAFE_CELL; } @@ -439,6 +444,12 @@ impl<'tcx> AdtDef<'tcx> { self.flags().contains(AdtFlags::IS_MANUALLY_DROP) } + /// Returns `true` if this is `MaybeDangling`. + #[inline] + pub fn is_maybe_dangling(self) -> bool { + self.flags().contains(AdtFlags::IS_MAYBE_DANGLING) + } + /// Returns `true` if this is `Pin`. #[inline] pub fn is_pin(self) -> bool { diff --git a/compiler/rustc_middle/src/ty/layout.rs b/compiler/rustc_middle/src/ty/layout.rs index d0fd2f02a336a..f36f8f5accf3b 100644 --- a/compiler/rustc_middle/src/ty/layout.rs +++ b/compiler/rustc_middle/src/ty/layout.rs @@ -1053,6 +1053,12 @@ where }) } + ty::Adt(adt_def, ..) if adt_def.is_maybe_dangling() => { + // FIXME: what is the exact effect of maybe dangling? + Self::ty_and_layout_pointee_info_at(this.field(cx, 0), cx, offset) + .map(|info| PointeeInfo { safe: None, ..info }) + } + _ => { let mut data_variant = match &this.variants { // Within the discriminant field, only the niche itself is @@ -1091,7 +1097,7 @@ where } } Variants::Multiple { .. } => None, - _ => Some(this), + Variants::Empty | Variants::Single { .. 
} => Some(this), }; if let Some(variant) = data_variant diff --git a/compiler/rustc_span/src/symbol.rs b/compiler/rustc_span/src/symbol.rs index c9aaba6cce3bc..41ccd40e4f31b 100644 --- a/compiler/rustc_span/src/symbol.rs +++ b/compiler/rustc_span/src/symbol.rs @@ -1403,6 +1403,7 @@ symbols! { maxnumf128, may_dangle, may_unwind, + maybe_dangling, maybe_uninit, maybe_uninit_uninit, maybe_uninit_zeroed, diff --git a/library/core/src/mem/manually_drop.rs b/library/core/src/mem/manually_drop.rs index 7d49da8509577..74f917bcf1cc5 100644 --- a/library/core/src/mem/manually_drop.rs +++ b/library/core/src/mem/manually_drop.rs @@ -1,4 +1,7 @@ -use crate::marker::Destruct; +use crate::cmp::Ordering; +use crate::hash::{Hash, Hasher}; +use crate::marker::{Destruct, StructuralPartialEq}; +use crate::mem::MaybeDangling; use crate::ops::{Deref, DerefMut, DerefPure}; use crate::ptr; @@ -152,11 +155,11 @@ use crate::ptr; /// [`MaybeUninit`]: crate::mem::MaybeUninit #[stable(feature = "manually_drop", since = "1.20.0")] #[lang = "manually_drop"] -#[derive(Copy, Clone, Debug, Default, PartialEq, Eq, PartialOrd, Ord, Hash)] +#[derive(Copy, Clone, Debug, Default)] #[repr(transparent)] #[rustc_pub_transparent] pub struct ManuallyDrop { - value: T, + value: MaybeDangling, } impl ManuallyDrop { @@ -179,7 +182,7 @@ impl ManuallyDrop { #[rustc_const_stable(feature = "const_manually_drop", since = "1.32.0")] #[inline(always)] pub const fn new(value: T) -> ManuallyDrop { - ManuallyDrop { value } + ManuallyDrop { value: MaybeDangling::new(value) } } /// Extracts the value from the `ManuallyDrop` container. @@ -197,7 +200,7 @@ impl ManuallyDrop { #[rustc_const_stable(feature = "const_manually_drop", since = "1.32.0")] #[inline(always)] pub const fn into_inner(slot: ManuallyDrop) -> T { - slot.value + slot.value.into_inner() } /// Takes the value from the `ManuallyDrop` container out. @@ -222,7 +225,7 @@ impl ManuallyDrop { pub const unsafe fn take(slot: &mut ManuallyDrop) -> T { // SAFETY: we are reading from a reference, which is guaranteed // to be valid for reads. - unsafe { ptr::read(&slot.value) } + unsafe { ptr::read(slot.value.as_ref()) } } } @@ -259,7 +262,7 @@ impl ManuallyDrop { // SAFETY: we are dropping the value pointed to by a mutable reference // which is guaranteed to be valid for writes. // It is up to the caller to make sure that `slot` isn't dropped again. 
- unsafe { ptr::drop_in_place(&mut slot.value) } + unsafe { ptr::drop_in_place(slot.value.as_mut()) } } } @@ -269,7 +272,7 @@ impl const Deref for ManuallyDrop { type Target = T; #[inline(always)] fn deref(&self) -> &T { - &self.value + self.value.as_ref() } } @@ -278,9 +281,43 @@ impl const Deref for ManuallyDrop { impl const DerefMut for ManuallyDrop { #[inline(always)] fn deref_mut(&mut self) -> &mut T { - &mut self.value + self.value.as_mut() } } #[unstable(feature = "deref_pure_trait", issue = "87121")] unsafe impl DerefPure for ManuallyDrop {} + +#[stable(feature = "manually_drop", since = "1.20.0")] +impl Eq for ManuallyDrop {} + +#[stable(feature = "manually_drop", since = "1.20.0")] +impl PartialEq for ManuallyDrop { + fn eq(&self, other: &Self) -> bool { + self.value.as_ref().eq(other.value.as_ref()) + } +} + +#[stable(feature = "manually_drop", since = "1.20.0")] +impl StructuralPartialEq for ManuallyDrop {} + +#[stable(feature = "manually_drop", since = "1.20.0")] +impl Ord for ManuallyDrop { + fn cmp(&self, other: &Self) -> Ordering { + self.value.as_ref().cmp(other.value.as_ref()) + } +} + +#[stable(feature = "manually_drop", since = "1.20.0")] +impl PartialOrd for ManuallyDrop { + fn partial_cmp(&self, other: &Self) -> Option { + self.value.as_ref().partial_cmp(other.value.as_ref()) + } +} + +#[stable(feature = "manually_drop", since = "1.20.0")] +impl Hash for ManuallyDrop { + fn hash(&self, state: &mut H) { + self.value.as_ref().hash(state); + } +} diff --git a/library/core/src/mem/maybe_dangling.rs b/library/core/src/mem/maybe_dangling.rs new file mode 100644 index 0000000000000..6b050927ec810 --- /dev/null +++ b/library/core/src/mem/maybe_dangling.rs @@ -0,0 +1,110 @@ +#![unstable(feature = "maybe_dangling", issue = "118166")] + +use crate::{mem, ptr}; + +/// Allows wrapped [references] and [boxes] to dangle. +/// +/// That is, if a reference (or a `Box`) is wrapped in `MaybeDangling` (including when in a +/// (nested) field of a compound type wrapped in `MaybeDangling`), it does not have to follow +/// pointer aliasing rules or be dereferenceable. +/// +/// This can be useful when the value can become dangling while the function holding it is still +/// executing (particularly in concurrent code). As a somewhat absurd example, consider this code: +/// +/// ```rust,no_run +/// #![feature(box_as_ptr)] +/// # use std::alloc::{dealloc, Layout}; +/// # use std::mem; +/// +/// let mut boxed = Box::new(0_u32); +/// let ptr = Box::as_mut_ptr(&mut boxed); +/// +/// // Safety: the pointer comes from a box and thus was allocated before; `box` is not used afterwards +/// unsafe { dealloc(ptr.cast(), Layout::new::<u32>()) }; +/// +/// mem::forget(boxed); // <-- this is UB! +/// ``` +/// +/// Even though the `Box`'s destructor is not run (and thus we don't have a double-free bug), this +/// code is still UB. This is because when moving `boxed` into `forget`, its validity invariants +/// are asserted, causing UB since the `Box` is dangling. The safety comment is therefore wrong: +/// moving the `boxed` variable into the `forget` call *is* a use.
+/// +/// To fix this we could use `MaybeDangling`: +/// +/// ```rust +/// #![feature(maybe_dangling, box_as_ptr)] +/// # use std::alloc::{dealloc, Layout}; +/// # use std::mem::{self, MaybeDangling}; +/// +/// let mut boxed = MaybeDangling::new(Box::new(0_u32)); +/// let ptr = Box::as_mut_ptr(boxed.as_mut()); +/// +/// // Safety: the pointer comes from a box and thus was allocated before; `box` is not used afterwards +/// unsafe { dealloc(ptr.cast(), Layout::new::<u32>()) }; +/// +/// mem::forget(boxed); // <-- this is OK! +/// ``` +/// +/// Note that the bit pattern must still be valid for the wrapped type. That is, [references] +/// (and [boxes]) still must be aligned and non-null. +/// +/// Additionally note that safe code can still assume that the inner value in a `MaybeDangling` is +/// **not** dangling -- functions like [`as_ref`] and [`into_inner`] are safe. It is not sound to +/// return a dangling reference in a `MaybeDangling` to safe code. However, it *is* sound +/// to hold such values internally in your code -- and there's no way to do that without +/// this type. Note that other types can use this type and thus get the same effect; in particular, +/// [`ManuallyDrop`] uses `MaybeDangling`. +/// +/// Note that `MaybeDangling` doesn't prevent drops from being run, which can lead to UB if the +/// drop observes a dangling value. If you need to prevent drops from being run, use [`ManuallyDrop`] +/// instead. +/// +/// [references]: prim@reference +/// [boxes]: ../../std/boxed/struct.Box.html +/// [`into_inner`]: MaybeDangling::into_inner +/// [`as_ref`]: MaybeDangling::as_ref +/// [`ManuallyDrop`]: crate::mem::ManuallyDrop +#[repr(transparent)] +#[rustc_pub_transparent] +#[derive(Debug, Copy, Clone, Default)] +#[lang = "maybe_dangling"] +pub struct MaybeDangling<P: ?Sized>(P); + +impl<P: ?Sized> MaybeDangling<P>
{ + /// Wraps a value in a `MaybeDangling`, allowing it to dangle. + pub const fn new(x: P) -> Self + where + P: Sized, + { + MaybeDangling(x) + } + + /// Returns a reference to the inner value. + /// + /// Note that this is UB if the inner value is currently dangling. + pub const fn as_ref(&self) -> &P { + &self.0 + } + + /// Returns a mutable reference to the inner value. + /// + /// Note that this is UB if the inner value is currently dangling. + pub const fn as_mut(&mut self) -> &mut P { + &mut self.0 + } + + /// Extracts the value from the `MaybeDangling` container. + /// + /// Note that this is UB if the inner value is currently dangling. + pub const fn into_inner(self) -> P + where + P: Sized, + { + // FIXME: replace this with `self.0` when const checker can figure out that `self` isn't actually dropped + // SAFETY: this is equivalent to `self.0` + let x = unsafe { ptr::read(&self.0) }; + mem::forget(self); + x + } +} diff --git a/library/core/src/mem/mod.rs b/library/core/src/mem/mod.rs index ad5fda0cfe4db..4f7edce1e977f 100644 --- a/library/core/src/mem/mod.rs +++ b/library/core/src/mem/mod.rs @@ -19,6 +19,10 @@ mod maybe_uninit; #[stable(feature = "maybe_uninit", since = "1.36.0")] pub use maybe_uninit::MaybeUninit; +mod maybe_dangling; +#[unstable(feature = "maybe_dangling", issue = "118166")] +pub use maybe_dangling::MaybeDangling; + mod transmutability; #[unstable(feature = "transmutability", issue = "99571")] pub use transmutability::{Assume, TransmuteFrom}; diff --git a/library/std/src/lib.rs b/library/std/src/lib.rs index 8fb1b1b05d20c..4abf1b24ed9c6 100644 --- a/library/std/src/lib.rs +++ b/library/std/src/lib.rs @@ -344,6 +344,7 @@ #![feature(int_from_ascii)] #![feature(ip)] #![feature(lazy_get)] +#![feature(maybe_dangling)] #![feature(maybe_uninit_array_assume_init)] #![feature(panic_can_unwind)] #![feature(panic_internals)] diff --git a/library/std/src/thread/lifecycle.rs b/library/std/src/thread/lifecycle.rs index a48594c606a35..b4fff231ceb71 100644 --- a/library/std/src/thread/lifecycle.rs +++ b/library/std/src/thread/lifecycle.rs @@ -7,7 +7,7 @@ use super::thread::Thread; use super::{Result, spawnhook}; use crate::cell::UnsafeCell; use crate::marker::PhantomData; -use crate::mem::{ManuallyDrop, MaybeUninit}; +use crate::mem::MaybeDangling; use crate::sync::Arc; use crate::sync::atomic::{Atomic, AtomicUsize, Ordering}; use crate::sys::thread as imp; @@ -58,29 +58,8 @@ where Arc::new(Packet { scope: scope_data, result: UnsafeCell::new(None), _marker: PhantomData }); let their_packet = my_packet.clone(); - // Pass `f` in `MaybeUninit` because actually that closure might *run longer than the lifetime of `F`*. + // Pass `f` in `MaybeDangling` because actually that closure might *run longer than the lifetime of `F`*. // See for more details. - // To prevent leaks we use a wrapper that drops its contents. - #[repr(transparent)] - struct MaybeDangling(MaybeUninit); - impl MaybeDangling { - fn new(x: T) -> Self { - MaybeDangling(MaybeUninit::new(x)) - } - fn into_inner(self) -> T { - // Make sure we don't drop. - let this = ManuallyDrop::new(self); - // SAFETY: we are always initialized. - unsafe { this.0.assume_init_read() } - } - } - impl Drop for MaybeDangling { - fn drop(&mut self) { - // SAFETY: we are always initialized. 
- unsafe { self.0.assume_init_drop() }; - } - } - let f = MaybeDangling::new(f); // The entrypoint of the Rust thread, after platform-specific thread diff --git a/src/etc/gdb_providers.py b/src/etc/gdb_providers.py index 676842f66240d..b0b6682f5279e 100644 --- a/src/etc/gdb_providers.py +++ b/src/etc/gdb_providers.py @@ -298,7 +298,7 @@ def cast_to_internal(node): for i in xrange(0, length + 1): if height > 0: - child_ptr = edges[i]["value"]["value"] + child_ptr = edges[i]["value"]["value"][ZERO_FIELD] for child in children_of_node(child_ptr, height - 1): yield child if i < length: @@ -306,12 +306,12 @@ def cast_to_internal(node): key_type_size = keys.type.sizeof val_type_size = vals.type.sizeof key = ( - keys[i]["value"]["value"] + keys[i]["value"]["value"][ZERO_FIELD] if key_type_size > 0 else gdb.parse_and_eval("()") ) val = ( - vals[i]["value"]["value"] + vals[i]["value"]["value"][ZERO_FIELD] if val_type_size > 0 else gdb.parse_and_eval("()") ) diff --git a/src/tools/miri/src/borrow_tracker/stacked_borrows/mod.rs b/src/tools/miri/src/borrow_tracker/stacked_borrows/mod.rs index e8d97491acaf3..11b5845eb0b3a 100644 --- a/src/tools/miri/src/borrow_tracker/stacked_borrows/mod.rs +++ b/src/tools/miri/src/borrow_tracker/stacked_borrows/mod.rs @@ -892,7 +892,8 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { RetagKind::FnEntry => RetagCause::FnEntry, RetagKind::Default | RetagKind::Raw => RetagCause::Normal, }; - let mut visitor = RetagVisitor { ecx: this, kind, retag_cause, in_field: false }; + let mut visitor = + RetagVisitor { ecx: this, kind, retag_cause, in_field: false, may_dangle: false }; return visitor.visit_value(place); // The actual visitor. @@ -901,6 +902,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { kind: RetagKind, retag_cause: RetagCause, in_field: bool, + may_dangle: bool, } impl<'ecx, 'tcx> RetagVisitor<'ecx, 'tcx> { #[inline(always)] // yes this helps in our benchmarks @@ -909,13 +911,16 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { place: &PlaceTy<'tcx>, new_perm: NewPermission, ) -> InterpResult<'tcx> { - let val = self.ecx.read_immediate(&self.ecx.place_to_op(place)?)?; - let val = self.ecx.sb_retag_reference( - &val, - new_perm, - RetagInfo { cause: self.retag_cause, in_field: self.in_field }, - )?; - self.ecx.write_immediate(*val, place)?; + if !self.may_dangle { + let val = self.ecx.read_immediate(&self.ecx.place_to_op(place)?)?; + let val = self.ecx.sb_retag_reference( + &val, + new_perm, + RetagInfo { cause: self.retag_cause, in_field: self.in_field }, + )?; + self.ecx.write_immediate(*val, place)?; + } + interp_ok(()) } } @@ -963,6 +968,13 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // even if field retagging is not enabled. *shrug*) self.walk_value(place)?; } + ty::Adt(adt, _) if adt.is_maybe_dangling() => { + let in_field = mem::replace(&mut self.in_field, true); // remember and restore old value + let may_dangle = mem::replace(&mut self.may_dangle, true); // remember and restore old value + self.walk_value(place)?; + self.may_dangle = may_dangle; + self.in_field = in_field; + } _ => { // Not a reference/pointer/box. Recurse. 
let in_field = mem::replace(&mut self.in_field, true); // remember and restore old value diff --git a/src/tools/miri/tests/pass/move_manually_drop.rs b/src/tools/miri/tests/pass/move_manually_drop.rs new file mode 100644 index 0000000000000..746f9b6cb90b4 --- /dev/null +++ b/src/tools/miri/tests/pass/move_manually_drop.rs @@ -0,0 +1,7 @@ +use std::mem::ManuallyDrop; + +fn main() { + let mut x = ManuallyDrop::new(Box::new(1)); + unsafe { ManuallyDrop::drop(&mut x) }; + let _x = x; // move +} diff --git a/src/tools/miri/tests/pass/stacked_borrows/stack-printing.stdout b/src/tools/miri/tests/pass/stacked_borrows/stack-printing.stdout index 296339e738455..de7da309271da 100644 --- a/src/tools/miri/tests/pass/stacked_borrows/stack-printing.stdout +++ b/src/tools/miri/tests/pass/stacked_borrows/stack-printing.stdout @@ -1,6 +1,6 @@ 0..1: [ SharedReadWrite ] 0..1: [ SharedReadWrite ] 0..1: [ SharedReadWrite ] -0..1: [ SharedReadWrite Unique Unique Unique Unique Unique Unique Unique ] -0..1: [ SharedReadWrite Disabled Disabled Disabled Disabled Disabled Disabled Disabled SharedReadOnly ] +0..1: [ SharedReadWrite Unique Unique Unique Unique Unique Unique Unique Unique Unique Unique Unique ] +0..1: [ SharedReadWrite Disabled Disabled Disabled Disabled Disabled Disabled Disabled Disabled Disabled Disabled Disabled SharedReadOnly ] 0..1: [ unknown-bottom(..) ] diff --git a/tests/codegen-llvm/manually_drop_refs.rs b/tests/codegen-llvm/manually_drop_refs.rs new file mode 100644 index 0000000000000..07d28ee9e22a9 --- /dev/null +++ b/tests/codegen-llvm/manually_drop_refs.rs @@ -0,0 +1,22 @@ +//@ compile-flags: -Copt-level=3 -Zmerge-functions=disabled +#![crate_type = "lib"] + +use std::mem::ManuallyDrop; + +// CHECK: define noundef nonnull ptr @f(ptr noundef nonnull readnone returned {{(captures\(ret: address, provenance\) )?}}%x) unnamed_addr +#[no_mangle] +pub fn f(x: ManuallyDrop>) -> ManuallyDrop> { + x +} + +// CHECK: define noundef nonnull ptr @g(ptr noundef nonnull readnone returned {{(captures\(ret: address, provenance\) )?}}%x) unnamed_addr +#[no_mangle] +pub fn g(x: ManuallyDrop<&u8>) -> ManuallyDrop<&u8> { + x +} + +// CHECK: define noundef nonnull ptr @h(ptr noundef nonnull readnone returned {{(captures\(ret: address, provenance\) )?}}%x) unnamed_addr +#[no_mangle] +pub fn h(x: ManuallyDrop<&mut u8>) -> ManuallyDrop<&mut u8> { + x +} diff --git a/tests/mir-opt/pre-codegen/loops.vec_move.PreCodegen.after.mir b/tests/mir-opt/pre-codegen/loops.vec_move.PreCodegen.after.mir index 4260ec3eaedf1..6ab4b77123069 100644 --- a/tests/mir-opt/pre-codegen/loops.vec_move.PreCodegen.after.mir +++ b/tests/mir-opt/pre-codegen/loops.vec_move.PreCodegen.after.mir @@ -3,304 +3,322 @@ fn vec_move(_1: Vec) -> () { debug v => _1; let mut _0: (); - let mut _21: std::vec::IntoIter; let mut _22: std::vec::IntoIter; - let mut _23: &mut std::vec::IntoIter; - let mut _24: std::option::Option; - let mut _25: isize; - let _27: (); + let mut _23: std::vec::IntoIter; + let mut _24: &mut std::vec::IntoIter; + let mut _25: std::option::Option; + let mut _26: isize; + let _28: (); scope 1 { - debug iter => _22; - let _26: impl Sized; + debug iter => _23; + let _27: impl Sized; scope 2 { - debug x => _26; + debug x => _27; } } scope 3 (inlined as IntoIterator>::into_iter) { debug self => _1; - let _2: std::mem::ManuallyDrop>; - let mut _3: *const std::alloc::Global; - let mut _8: usize; - let mut _10: *mut impl Sized; - let mut _11: *const impl Sized; - let mut _12: usize; - let _28: &std::vec::Vec; - let mut _29: 
&std::mem::ManuallyDrop>; - let mut _30: &alloc::raw_vec::RawVec; - let mut _31: &std::mem::ManuallyDrop>; - let _32: &std::vec::Vec; - let mut _33: &std::mem::ManuallyDrop>; - let _34: &std::vec::Vec; - let mut _35: &std::mem::ManuallyDrop>; - let mut _36: &alloc::raw_vec::RawVec; - let mut _37: &std::mem::ManuallyDrop>; + let _3: std::mem::ManuallyDrop>; + let mut _4: *const std::alloc::Global; + let mut _9: usize; + let mut _11: *mut impl Sized; + let mut _12: *const impl Sized; + let mut _13: usize; + let _29: &std::vec::Vec; + let mut _30: &std::mem::ManuallyDrop>; + let mut _31: &alloc::raw_vec::RawVec; + let mut _32: &std::mem::ManuallyDrop>; + let _33: &std::vec::Vec; + let mut _34: &std::mem::ManuallyDrop>; + let _35: &std::vec::Vec; + let mut _36: &std::mem::ManuallyDrop>; + let mut _37: &alloc::raw_vec::RawVec; + let mut _38: &std::mem::ManuallyDrop>; scope 4 { - debug me => _2; + debug me => _3; scope 5 { - debug alloc => const ManuallyDrop:: {{ value: std::alloc::Global }}; - let _6: std::ptr::NonNull; + debug alloc => const ManuallyDrop:: {{ value: MaybeDangling::(std::alloc::Global) }}; + let _7: std::ptr::NonNull; scope 6 { - debug buf => _6; - let _7: *mut impl Sized; + debug buf => _7; + let _8: *mut impl Sized; scope 7 { - debug begin => _7; + debug begin => _8; scope 8 { - debug end => _11; - let _19: usize; + debug end => _12; + let _20: usize; scope 9 { - debug cap => _19; + debug cap => _20; } - scope 39 (inlined > as Deref>::deref) { - debug self => _37; + scope 45 (inlined > as Deref>::deref) { + debug self => _38; + scope 46 (inlined MaybeDangling::>::as_ref) { + } } - scope 40 (inlined alloc::raw_vec::RawVec::::capacity) { - debug self => _36; - let mut _38: &alloc::raw_vec::RawVecInner; - scope 41 (inlined std::mem::size_of::) { + scope 47 (inlined alloc::raw_vec::RawVec::::capacity) { + debug self => _37; + let mut _39: &alloc::raw_vec::RawVecInner; + scope 48 (inlined std::mem::size_of::) { } - scope 42 (inlined alloc::raw_vec::RawVecInner::capacity) { - debug self => _38; + scope 49 (inlined alloc::raw_vec::RawVecInner::capacity) { + debug self => _39; debug elem_size => const ::SIZE; - let mut _20: core::num::niche_types::UsizeNoHighBit; - scope 43 (inlined core::num::niche_types::UsizeNoHighBit::as_inner) { - debug self => _20; + let mut _21: core::num::niche_types::UsizeNoHighBit; + scope 50 (inlined core::num::niche_types::UsizeNoHighBit::as_inner) { + debug self => _21; } } } } - scope 25 (inlined > as Deref>::deref) { - debug self => _33; + scope 29 (inlined > as Deref>::deref) { + debug self => _34; + scope 30 (inlined MaybeDangling::>::as_ref) { + } } - scope 26 (inlined Vec::::len) { - debug self => _32; - let mut _13: bool; - scope 27 { + scope 31 (inlined Vec::::len) { + debug self => _33; + let mut _14: bool; + scope 32 { } } - scope 28 (inlined std::ptr::mut_ptr::::wrapping_byte_add) { - debug self => _7; - debug count => _12; - let mut _14: *mut u8; - let mut _18: *mut u8; - scope 29 (inlined std::ptr::mut_ptr::::cast::) { - debug self => _7; + scope 33 (inlined std::ptr::mut_ptr::::wrapping_byte_add) { + debug self => _8; + debug count => _13; + let mut _15: *mut u8; + let mut _19: *mut u8; + scope 34 (inlined std::ptr::mut_ptr::::cast::) { + debug self => _8; } - scope 30 (inlined std::ptr::mut_ptr::::wrapping_add) { - debug self => _14; - debug count => _12; - let mut _15: isize; - scope 31 (inlined std::ptr::mut_ptr::::wrapping_offset) { - debug self => _14; - debug count => _15; - let mut _16: *const u8; + scope 35 (inlined 
std::ptr::mut_ptr::::wrapping_add) { + debug self => _15; + debug count => _13; + let mut _16: isize; + scope 36 (inlined std::ptr::mut_ptr::::wrapping_offset) { + debug self => _15; + debug count => _16; let mut _17: *const u8; + let mut _18: *const u8; } } - scope 32 (inlined std::ptr::mut_ptr::::with_metadata_of::) { - debug self => _18; - debug meta => _5; - scope 33 (inlined std::ptr::metadata::) { - debug ptr => _5; + scope 37 (inlined std::ptr::mut_ptr::::with_metadata_of::) { + debug self => _19; + debug meta => _6; + scope 38 (inlined std::ptr::metadata::) { + debug ptr => _6; } - scope 34 (inlined std::ptr::from_raw_parts_mut::) { + scope 39 (inlined std::ptr::from_raw_parts_mut::) { } } } - scope 35 (inlined > as Deref>::deref) { - debug self => _35; + scope 40 (inlined > as Deref>::deref) { + debug self => _36; + scope 41 (inlined MaybeDangling::>::as_ref) { + } } - scope 36 (inlined Vec::::len) { - debug self => _34; - let mut _9: bool; - scope 37 { + scope 42 (inlined Vec::::len) { + debug self => _35; + let mut _10: bool; + scope 43 { } } - scope 38 (inlined #[track_caller] std::ptr::mut_ptr::::add) { - debug self => _7; - debug count => _8; + scope 44 (inlined #[track_caller] std::ptr::mut_ptr::::add) { + debug self => _8; + debug count => _9; } } - scope 24 (inlined NonNull::::as_ptr) { - debug self => _6; + scope 28 (inlined NonNull::::as_ptr) { + debug self => _7; } } - scope 17 (inlined > as Deref>::deref) { - debug self => _31; + scope 20 (inlined > as Deref>::deref) { + debug self => _32; + scope 21 (inlined MaybeDangling::>::as_ref) { + } } - scope 18 (inlined alloc::raw_vec::RawVec::::non_null) { - debug self => _30; - scope 19 (inlined alloc::raw_vec::RawVecInner::non_null::) { - let mut _4: std::ptr::NonNull; - scope 20 (inlined Unique::::cast::) { - scope 21 (inlined NonNull::::cast::) { - let mut _5: *const impl Sized; - scope 22 (inlined NonNull::::as_ptr) { + scope 22 (inlined alloc::raw_vec::RawVec::::non_null) { + debug self => _31; + scope 23 (inlined alloc::raw_vec::RawVecInner::non_null::) { + let mut _5: std::ptr::NonNull; + scope 24 (inlined Unique::::cast::) { + scope 25 (inlined NonNull::::cast::) { + let mut _6: *const impl Sized; + scope 26 (inlined NonNull::::as_ptr) { } } } - scope 23 (inlined Unique::::as_non_null_ptr) { + scope 27 (inlined Unique::::as_non_null_ptr) { } } } } - scope 11 (inlined > as Deref>::deref) { - debug self => _29; + scope 12 (inlined > as Deref>::deref) { + debug self => _30; + scope 13 (inlined MaybeDangling::>::as_ref) { + } } - scope 12 (inlined Vec::::allocator) { - debug self => _28; - scope 13 (inlined alloc::raw_vec::RawVec::::allocator) { - scope 14 (inlined alloc::raw_vec::RawVecInner::allocator) { + scope 14 (inlined Vec::::allocator) { + debug self => _29; + scope 15 (inlined alloc::raw_vec::RawVec::::allocator) { + scope 16 (inlined alloc::raw_vec::RawVecInner::allocator) { } } } - scope 15 (inlined #[track_caller] std::ptr::read::) { - debug src => _3; + scope 17 (inlined #[track_caller] std::ptr::read::) { + debug src => _4; } - scope 16 (inlined ManuallyDrop::::new) { + scope 18 (inlined ManuallyDrop::::new) { debug value => const std::alloc::Global; + scope 19 (inlined MaybeDangling::::new) { + } } } scope 10 (inlined ManuallyDrop::>::new) { debug value => _1; + let mut _2: std::mem::MaybeDangling>; + scope 11 (inlined MaybeDangling::>::new) { + } } } bb0: { - StorageLive(_21); - StorageLive(_6); + StorageLive(_22); StorageLive(_7); - StorageLive(_11); - StorageLive(_19); + StorageLive(_8); + 
StorageLive(_12); + StorageLive(_20); + StorageLive(_6); StorageLive(_5); - StorageLive(_4); - StorageLive(_17); - StorageLive(_2); - _2 = ManuallyDrop::> { value: copy _1 }; + StorageLive(_18); StorageLive(_3); - // DBG: _29 = &_2; - // DBG: _28 = &(_2.0: std::vec::Vec); - _3 = &raw const ((((_2.0: std::vec::Vec).0: alloc::raw_vec::RawVec).0: alloc::raw_vec::RawVecInner).2: std::alloc::Global); - StorageDead(_3); - // DBG: _31 = &_2; - // DBG: _30 = &((_2.0: std::vec::Vec).0: alloc::raw_vec::RawVec); - _4 = copy (((((_2.0: std::vec::Vec).0: alloc::raw_vec::RawVec).0: alloc::raw_vec::RawVecInner).0: std::ptr::Unique).0: std::ptr::NonNull); - _5 = copy _4 as *const impl Sized (Transmute); - _6 = NonNull:: { pointer: copy _5 }; - _7 = copy _4 as *mut impl Sized (Transmute); + StorageLive(_2); + _2 = MaybeDangling::>(copy _1); + _3 = ManuallyDrop::> { value: move _2 }; + StorageDead(_2); + StorageLive(_4); + // DBG: _30 = &_3; + // DBG: _29 = &((_3.0: std::mem::MaybeDangling>).0: std::vec::Vec); + _4 = &raw const (((((_3.0: std::mem::MaybeDangling>).0: std::vec::Vec).0: alloc::raw_vec::RawVec).0: alloc::raw_vec::RawVecInner).2: std::alloc::Global); + StorageDead(_4); + // DBG: _32 = &_3; + // DBG: _31 = &(((_3.0: std::mem::MaybeDangling>).0: std::vec::Vec).0: alloc::raw_vec::RawVec); + _5 = copy ((((((_3.0: std::mem::MaybeDangling>).0: std::vec::Vec).0: alloc::raw_vec::RawVec).0: alloc::raw_vec::RawVecInner).0: std::ptr::Unique).0: std::ptr::NonNull); + _6 = copy _5 as *const impl Sized (Transmute); + _7 = NonNull:: { pointer: copy _6 }; + _8 = copy _5 as *mut impl Sized (Transmute); switchInt(const ::IS_ZST) -> [0: bb1, otherwise: bb2]; } bb1: { - StorageLive(_10); - StorageLive(_8); - // DBG: _35 = &_2; - // DBG: _34 = &(_2.0: std::vec::Vec); - _8 = copy ((_2.0: std::vec::Vec).1: usize); + StorageLive(_11); StorageLive(_9); - _9 = Le(copy _8, const ::MAX_SLICE_LEN); - assume(move _9); - StorageDead(_9); - _10 = Offset(copy _7, copy _8); - _11 = copy _10 as *const impl Sized (PtrToPtr); - StorageDead(_8); + // DBG: _36 = &_3; + // DBG: _35 = &((_3.0: std::mem::MaybeDangling>).0: std::vec::Vec); + _9 = copy (((_3.0: std::mem::MaybeDangling>).0: std::vec::Vec).1: usize); + StorageLive(_10); + _10 = Le(copy _9, const ::MAX_SLICE_LEN); + assume(move _10); StorageDead(_10); + _11 = Offset(copy _8, copy _9); + _12 = copy _11 as *const impl Sized (PtrToPtr); + StorageDead(_9); + StorageDead(_11); goto -> bb4; } bb2: { - StorageLive(_12); - // DBG: _33 = &_2; - // DBG: _32 = &(_2.0: std::vec::Vec); - _12 = copy ((_2.0: std::vec::Vec).1: usize); StorageLive(_13); - _13 = Le(copy _12, const ::MAX_SLICE_LEN); - assume(move _13); - StorageDead(_13); - StorageLive(_18); + // DBG: _34 = &_3; + // DBG: _33 = &((_3.0: std::mem::MaybeDangling>).0: std::vec::Vec); + _13 = copy (((_3.0: std::mem::MaybeDangling>).0: std::vec::Vec).1: usize); StorageLive(_14); - _14 = copy _4 as *mut u8 (Transmute); + _14 = Le(copy _13, const ::MAX_SLICE_LEN); + assume(move _14); + StorageDead(_14); + StorageLive(_19); StorageLive(_15); - _15 = copy _12 as isize (IntToInt); + _15 = copy _5 as *mut u8 (Transmute); StorageLive(_16); - _16 = copy _4 as *const u8 (Transmute); - _17 = arith_offset::(move _16, move _15) -> [return: bb3, unwind unreachable]; + _16 = copy _13 as isize (IntToInt); + StorageLive(_17); + _17 = copy _5 as *const u8 (Transmute); + _18 = arith_offset::(move _17, move _16) -> [return: bb3, unwind unreachable]; } bb3: { + StorageDead(_17); + _19 = copy _18 as *mut u8 (PtrToPtr); StorageDead(_16); - _18 = copy 
_17 as *mut u8 (PtrToPtr); StorageDead(_15); - StorageDead(_14); - StorageDead(_18); - StorageDead(_12); - _11 = copy _17 as *const impl Sized (PtrToPtr); + StorageDead(_19); + StorageDead(_13); + _12 = copy _18 as *const impl Sized (PtrToPtr); goto -> bb4; } bb4: { - // DBG: _37 = &_2; - // DBG: _36 = &((_2.0: std::vec::Vec).0: alloc::raw_vec::RawVec); - // DBG: _38 = &(((_2.0: std::vec::Vec).0: alloc::raw_vec::RawVec).0: alloc::raw_vec::RawVecInner); + // DBG: _38 = &_3; + // DBG: _37 = &(((_3.0: std::mem::MaybeDangling>).0: std::vec::Vec).0: alloc::raw_vec::RawVec); + // DBG: _39 = &((((_3.0: std::mem::MaybeDangling>).0: std::vec::Vec).0: alloc::raw_vec::RawVec).0: alloc::raw_vec::RawVecInner); switchInt(const ::SIZE) -> [0: bb5, otherwise: bb6]; } bb5: { - _19 = const usize::MAX; + _20 = const usize::MAX; goto -> bb7; } bb6: { - StorageLive(_20); - _20 = copy ((((_2.0: std::vec::Vec).0: alloc::raw_vec::RawVec).0: alloc::raw_vec::RawVecInner).1: core::num::niche_types::UsizeNoHighBit); - _19 = copy _20 as usize (Transmute); - StorageDead(_20); + StorageLive(_21); + _21 = copy (((((_3.0: std::mem::MaybeDangling>).0: std::vec::Vec).0: alloc::raw_vec::RawVec).0: alloc::raw_vec::RawVecInner).1: core::num::niche_types::UsizeNoHighBit); + _20 = copy _21 as usize (Transmute); + StorageDead(_21); goto -> bb7; } bb7: { - _21 = std::vec::IntoIter:: { buf: copy _6, phantom: const ZeroSized: PhantomData, cap: move _19, alloc: const ManuallyDrop:: {{ value: std::alloc::Global }}, ptr: copy _6, end: copy _11 }; - StorageDead(_2); - StorageDead(_17); - StorageDead(_4); + _22 = std::vec::IntoIter:: { buf: copy _7, phantom: const ZeroSized: PhantomData, cap: move _20, alloc: const ManuallyDrop:: {{ value: MaybeDangling::(std::alloc::Global) }}, ptr: copy _7, end: copy _12 }; + StorageDead(_3); + StorageDead(_18); StorageDead(_5); - StorageDead(_19); - StorageDead(_11); - StorageDead(_7); StorageDead(_6); - StorageLive(_22); - _22 = move _21; + StorageDead(_20); + StorageDead(_12); + StorageDead(_8); + StorageDead(_7); + StorageLive(_23); + _23 = move _22; goto -> bb8; } bb8: { - StorageLive(_24); - _23 = &mut _22; - _24 = as Iterator>::next(move _23) -> [return: bb9, unwind: bb15]; + StorageLive(_25); + _24 = &mut _23; + _25 = as Iterator>::next(move _24) -> [return: bb9, unwind: bb15]; } bb9: { - _25 = discriminant(_24); - switchInt(move _25) -> [0: bb10, 1: bb12, otherwise: bb14]; + _26 = discriminant(_25); + switchInt(move _26) -> [0: bb10, 1: bb12, otherwise: bb14]; } bb10: { - StorageDead(_24); - drop(_22) -> [return: bb11, unwind continue]; + StorageDead(_25); + drop(_23) -> [return: bb11, unwind continue]; } bb11: { + StorageDead(_23); StorageDead(_22); - StorageDead(_21); return; } bb12: { - _26 = move ((_24 as Some).0: impl Sized); - _27 = opaque::(move _26) -> [return: bb13, unwind: bb15]; + _27 = move ((_25 as Some).0: impl Sized); + _28 = opaque::(move _27) -> [return: bb13, unwind: bb15]; } bb13: { - StorageDead(_24); + StorageDead(_25); goto -> bb8; } @@ -309,7 +327,7 @@ fn vec_move(_1: Vec) -> () { } bb15 (cleanup): { - drop(_22) -> [return: bb16, unwind terminate(cleanup)]; + drop(_23) -> [return: bb16, unwind terminate(cleanup)]; } bb16 (cleanup): { diff --git a/tests/ui/async-await/future-sizes/async-awaiting-fut.stdout b/tests/ui/async-await/future-sizes/async-awaiting-fut.stdout index b30c15bcbe6ed..13f03ffa65b5d 100644 --- a/tests/ui/async-await/future-sizes/async-awaiting-fut.stdout +++ b/tests/ui/async-await/future-sizes/async-awaiting-fut.stdout @@ -7,6 +7,8 @@ print-type-size 
variant `Returned`: 0 bytes print-type-size variant `Panicked`: 0 bytes print-type-size type: `std::mem::ManuallyDrop<{async fn body of calls_fut<{async fn body of big_fut()}>()}>`: 3077 bytes, alignment: 1 bytes print-type-size field `.value`: 3077 bytes +print-type-size type: `std::mem::MaybeDangling<{async fn body of calls_fut<{async fn body of big_fut()}>()}>`: 3077 bytes, alignment: 1 bytes +print-type-size field `.0`: 3077 bytes print-type-size type: `std::mem::MaybeUninit<{async fn body of calls_fut<{async fn body of big_fut()}>()}>`: 3077 bytes, alignment: 1 bytes print-type-size variant `MaybeUninit`: 3077 bytes print-type-size field `.uninit`: 0 bytes @@ -36,6 +38,8 @@ print-type-size variant `Panicked`: 1025 bytes print-type-size upvar `.fut`: 1025 bytes print-type-size type: `std::mem::ManuallyDrop<{async fn body of big_fut()}>`: 1025 bytes, alignment: 1 bytes print-type-size field `.value`: 1025 bytes +print-type-size type: `std::mem::MaybeDangling<{async fn body of big_fut()}>`: 1025 bytes, alignment: 1 bytes +print-type-size field `.0`: 1025 bytes print-type-size type: `std::mem::MaybeUninit<{async fn body of big_fut()}>`: 1025 bytes, alignment: 1 bytes print-type-size variant `MaybeUninit`: 1025 bytes print-type-size field `.uninit`: 0 bytes @@ -85,6 +89,10 @@ print-type-size type: `std::mem::ManuallyDrop`: 1 bytes, alignment: 1 byte print-type-size field `.value`: 1 bytes print-type-size type: `std::mem::ManuallyDrop<{async fn body of wait()}>`: 1 bytes, alignment: 1 bytes print-type-size field `.value`: 1 bytes +print-type-size type: `std::mem::MaybeDangling`: 1 bytes, alignment: 1 bytes +print-type-size field `.0`: 1 bytes +print-type-size type: `std::mem::MaybeDangling<{async fn body of wait()}>`: 1 bytes, alignment: 1 bytes +print-type-size field `.0`: 1 bytes print-type-size type: `std::mem::MaybeUninit`: 1 bytes, alignment: 1 bytes print-type-size variant `MaybeUninit`: 1 bytes print-type-size field `.uninit`: 0 bytes diff --git a/tests/ui/async-await/future-sizes/large-arg.stdout b/tests/ui/async-await/future-sizes/large-arg.stdout index e00420d1493f4..d51afa33595cb 100644 --- a/tests/ui/async-await/future-sizes/large-arg.stdout +++ b/tests/ui/async-await/future-sizes/large-arg.stdout @@ -7,6 +7,8 @@ print-type-size variant `Returned`: 0 bytes print-type-size variant `Panicked`: 0 bytes print-type-size type: `std::mem::ManuallyDrop<{async fn body of a<[u8; 1024]>()}>`: 3075 bytes, alignment: 1 bytes print-type-size field `.value`: 3075 bytes +print-type-size type: `std::mem::MaybeDangling<{async fn body of a<[u8; 1024]>()}>`: 3075 bytes, alignment: 1 bytes +print-type-size field `.0`: 3075 bytes print-type-size type: `std::mem::MaybeUninit<{async fn body of a<[u8; 1024]>()}>`: 3075 bytes, alignment: 1 bytes print-type-size variant `MaybeUninit`: 3075 bytes print-type-size field `.uninit`: 0 bytes @@ -24,6 +26,8 @@ print-type-size variant `Panicked`: 1024 bytes print-type-size upvar `.t`: 1024 bytes print-type-size type: `std::mem::ManuallyDrop<{async fn body of b<[u8; 1024]>()}>`: 2050 bytes, alignment: 1 bytes print-type-size field `.value`: 2050 bytes +print-type-size type: `std::mem::MaybeDangling<{async fn body of b<[u8; 1024]>()}>`: 2050 bytes, alignment: 1 bytes +print-type-size field `.0`: 2050 bytes print-type-size type: `std::mem::MaybeUninit<{async fn body of b<[u8; 1024]>()}>`: 2050 bytes, alignment: 1 bytes print-type-size variant `MaybeUninit`: 2050 bytes print-type-size field `.uninit`: 0 bytes @@ -41,6 +45,8 @@ print-type-size variant `Panicked`: 1024 
bytes print-type-size upvar `.t`: 1024 bytes print-type-size type: `std::mem::ManuallyDrop<{async fn body of c<[u8; 1024]>()}>`: 1025 bytes, alignment: 1 bytes print-type-size field `.value`: 1025 bytes +print-type-size type: `std::mem::MaybeDangling<{async fn body of c<[u8; 1024]>()}>`: 1025 bytes, alignment: 1 bytes +print-type-size field `.0`: 1025 bytes print-type-size type: `std::mem::MaybeUninit<{async fn body of c<[u8; 1024]>()}>`: 1025 bytes, alignment: 1 bytes print-type-size variant `MaybeUninit`: 1025 bytes print-type-size field `.uninit`: 0 bytes diff --git a/tests/ui/print_type_sizes/async.stdout b/tests/ui/print_type_sizes/async.stdout index d3d6b6471c6ef..d2adff80e3c9c 100644 --- a/tests/ui/print_type_sizes/async.stdout +++ b/tests/ui/print_type_sizes/async.stdout @@ -12,6 +12,8 @@ print-type-size variant `Panicked`: 8192 bytes print-type-size upvar `.arg`: 8192 bytes print-type-size type: `std::mem::ManuallyDrop<[u8; 8192]>`: 8192 bytes, alignment: 1 bytes print-type-size field `.value`: 8192 bytes +print-type-size type: `std::mem::MaybeDangling<[u8; 8192]>`: 8192 bytes, alignment: 1 bytes +print-type-size field `.0`: 8192 bytes print-type-size type: `std::mem::MaybeUninit<[u8; 8192]>`: 8192 bytes, alignment: 1 bytes print-type-size variant `MaybeUninit`: 8192 bytes print-type-size field `.uninit`: 0 bytes @@ -47,6 +49,8 @@ print-type-size type: `std::ptr::NonNull`: 8 bytes, print-type-size field `.pointer`: 8 bytes print-type-size type: `std::mem::ManuallyDrop<{async fn body of wait()}>`: 1 bytes, alignment: 1 bytes print-type-size field `.value`: 1 bytes +print-type-size type: `std::mem::MaybeDangling<{async fn body of wait()}>`: 1 bytes, alignment: 1 bytes +print-type-size field `.0`: 1 bytes print-type-size type: `std::mem::MaybeUninit<{async fn body of wait()}>`: 1 bytes, alignment: 1 bytes print-type-size variant `MaybeUninit`: 1 bytes print-type-size field `.uninit`: 0 bytes diff --git a/tests/ui/print_type_sizes/coroutine_discr_placement.stdout b/tests/ui/print_type_sizes/coroutine_discr_placement.stdout index 4ce1ce46f6e82..b51beb514ba80 100644 --- a/tests/ui/print_type_sizes/coroutine_discr_placement.stdout +++ b/tests/ui/print_type_sizes/coroutine_discr_placement.stdout @@ -11,6 +11,8 @@ print-type-size variant `Returned`: 0 bytes print-type-size variant `Panicked`: 0 bytes print-type-size type: `std::mem::ManuallyDrop`: 4 bytes, alignment: 4 bytes print-type-size field `.value`: 4 bytes +print-type-size type: `std::mem::MaybeDangling`: 4 bytes, alignment: 4 bytes +print-type-size field `.0`: 4 bytes print-type-size type: `std::mem::MaybeUninit`: 4 bytes, alignment: 4 bytes print-type-size variant `MaybeUninit`: 4 bytes print-type-size field `.uninit`: 0 bytes diff --git a/tests/ui/thir-print/offset_of.stdout b/tests/ui/thir-print/offset_of.stdout index 846817f475288..ab924091ba7a0 100644 --- a/tests/ui/thir-print/offset_of.stdout +++ b/tests/ui/thir-print/offset_of.stdout @@ -68,7 +68,7 @@ body: ) else_block: None lint_level: Explicit(HirId(DefId(offset_of::concrete).10)) - span: $DIR/offset_of.rs:37:5: 1433:57 (#0) + span: $DIR/offset_of.rs:37:5: 1437:57 (#0) } } Stmt { @@ -117,7 +117,7 @@ body: ) else_block: None lint_level: Explicit(HirId(DefId(offset_of::concrete).20)) - span: $DIR/offset_of.rs:38:5: 1433:57 (#0) + span: $DIR/offset_of.rs:38:5: 1437:57 (#0) } } Stmt { @@ -166,7 +166,7 @@ body: ) else_block: None lint_level: Explicit(HirId(DefId(offset_of::concrete).30)) - span: $DIR/offset_of.rs:39:5: 1433:57 (#0) + span: $DIR/offset_of.rs:39:5: 1437:57 (#0) 
} } Stmt { @@ -215,7 +215,7 @@ body: ) else_block: None lint_level: Explicit(HirId(DefId(offset_of::concrete).40)) - span: $DIR/offset_of.rs:40:5: 1433:57 (#0) + span: $DIR/offset_of.rs:40:5: 1437:57 (#0) } } Stmt { @@ -264,7 +264,7 @@ body: ) else_block: None lint_level: Explicit(HirId(DefId(offset_of::concrete).50)) - span: $DIR/offset_of.rs:41:5: 1433:57 (#0) + span: $DIR/offset_of.rs:41:5: 1437:57 (#0) } } ] @@ -864,7 +864,7 @@ body: ) else_block: None lint_level: Explicit(HirId(DefId(offset_of::generic).12)) - span: $DIR/offset_of.rs:45:5: 1433:57 (#0) + span: $DIR/offset_of.rs:45:5: 1437:57 (#0) } } Stmt { @@ -913,7 +913,7 @@ body: ) else_block: None lint_level: Explicit(HirId(DefId(offset_of::generic).24)) - span: $DIR/offset_of.rs:46:5: 1433:57 (#0) + span: $DIR/offset_of.rs:46:5: 1437:57 (#0) } } Stmt { @@ -962,7 +962,7 @@ body: ) else_block: None lint_level: Explicit(HirId(DefId(offset_of::generic).36)) - span: $DIR/offset_of.rs:47:5: 1433:57 (#0) + span: $DIR/offset_of.rs:47:5: 1437:57 (#0) } } Stmt { @@ -1011,7 +1011,7 @@ body: ) else_block: None lint_level: Explicit(HirId(DefId(offset_of::generic).48)) - span: $DIR/offset_of.rs:48:5: 1433:57 (#0) + span: $DIR/offset_of.rs:48:5: 1437:57 (#0) } } ]
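
For context, a minimal, illustrative sketch (not part of the diff) of the user-visible behavior this change enables. It mirrors the new Miri pass test `tests/pass/move_manually_drop.rs` and the `MaybeDangling` API added in `library/core/src/mem/maybe_dangling.rs`, and it assumes a nightly toolchain with the unstable `maybe_dangling` feature (tracking issue #118166):

```rust
#![feature(maybe_dangling)]

use std::mem::{ManuallyDrop, MaybeDangling};

fn main() {
    // `ManuallyDrop<T>` now stores its value as `MaybeDangling<T>`, so moving a
    // `ManuallyDrop` whose contents were already dropped is no longer flagged by
    // Miri as undefined behavior.
    let mut x = ManuallyDrop::new(Box::new(1));
    // SAFETY: the boxed value is never used or dropped again after this call.
    unsafe { ManuallyDrop::drop(&mut x) };
    let _moved = x; // the now-dangling `Box` may be moved around, as long as it is not used

    // `MaybeDangling` can also be used directly: it is a `#[repr(transparent)]`
    // newtype, and `as_ref`/`into_inner` give access to the wrapped value.
    let wrapped: MaybeDangling<&str> = MaybeDangling::new("hello");
    assert_eq!(*wrapped.as_ref(), "hello");
    assert_eq!(wrapped.into_inner(), "hello");
}
```

The `ManuallyDrop` move above relies on both halves of this PR: the const-eval validity visitor and Miri's Stacked Borrows retag visitor now skip the dereferenceability/retag checks for anything wrapped in the `MaybeDangling` lang item.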