Remove dereferencing of Box from codegen #95576

Merged 9 commits on Jun 21, 2022.
14 changes: 6 additions & 8 deletions compiler/rustc_codegen_ssa/src/mir/operand.rs
@@ -118,22 +118,20 @@ impl<'a, 'tcx, V: CodegenObject> OperandRef<'tcx, V> {
}

pub fn deref<Cx: LayoutTypeMethods<'tcx>>(self, cx: &Cx) -> PlaceRef<'tcx, V> {
if self.layout.ty.is_box() {
RalfJung (Member), Apr 19, 2022:
Could you add a similar assertion to Miri? deref_operand would probably be a good spot.

Member:
Also, does this mean you can remove the special cases I listed at #95453 (comment)?

Contributor (PR author):
The first three special cases have been removed. The debuginfo one probably shouldn't be changed, because it could make debugging more annoying for no benefit. I'm currently investigating whether the last one can be changed safely without breaking things.

Member:
Also please remember to add a similar assertion to Miri. :)

Sorry for raising two points in the same thread. Sadly GitHub does not let one open multiple threads at the same line of code.

bug!("dereferencing {:?} in codegen", self.layout.ty);
}

let projected_ty = self
.layout
.ty
.builtin_deref(true)
Member:
Maybe builtin_deref should take a mode enum like this:

enum BuiltinDerefMode {
    /// `Box<T>`, `&T`, and optionally `*T`.
    UserFacing {
        /// Whether to also allow `*T`.
        allow_unsafe: bool
    },

    /// `&T` and `*T`.
    LowLevel,
}

(Though LowLevel should probably be named something better)

.unwrap_or_else(|| bug!("deref of non-pointer {:?}", self))
.ty;

let (llptr, llextra) = match self.val {
OperandValue::Immediate(llptr) => (llptr, None),
OperandValue::Pair(llptr, llextra) => {
// if the box's allocator isn't a ZST, then "llextra" is actually the allocator
if self.layout.ty.is_box() && !self.layout.field(cx, 1).is_zst() {
(llptr, None)
} else {
(llptr, Some(llextra))
}
}
OperandValue::Pair(llptr, llextra) => (llptr, Some(llextra)),
OperandValue::Ref(..) => bug!("Deref of by-Ref operand {:?}", self),
};
let layout = cx.layout_of(projected_ty);
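Regarding the BuiltinDerefMode suggestion above: a purely hypothetical call site (the enum and its variants are the reviewer's proposal, not an existing rustc API) could look roughly like this, with codegen asking only for the forms that remain after ElaborateBoxDerefs:

    // Hypothetical: codegen only ever sees `&T` and `*T` once Box derefs are elaborated.
    let projected_ty = self
        .layout
        .ty
        .builtin_deref(BuiltinDerefMode::LowLevel)
        .unwrap_or_else(|| bug!("deref of non-pointer {:?}", self))
        .ty;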
24 changes: 2 additions & 22 deletions compiler/rustc_codegen_ssa/src/mir/place.rs
@@ -446,35 +446,15 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
mir::PlaceRef { projection: &place_ref.projection[..elem.0], ..place_ref },
);

// a box with a non-zst allocator should not be directly dereferenced
if cg_base.layout.ty.is_box() && !cg_base.layout.field(cx, 1).is_zst() {
// Extract `Box<T>` -> `Unique<T>` -> `NonNull<T>` -> `*const T`
let ptr =
cg_base.extract_field(bx, 0).extract_field(bx, 0).extract_field(bx, 0);

ptr.deref(bx.cx())
} else {
cg_base.deref(bx.cx())
}
cg_base.deref(bx.cx())
} else {
bug!("using operand local {:?} as place", place_ref);
}
}
};
for elem in place_ref.projection[base..].iter() {
cg_base = match *elem {
mir::ProjectionElem::Deref => {
// a box with a non-zst allocator should not be directly dereferenced
if cg_base.layout.ty.is_box() && !cg_base.layout.field(cx, 1).is_zst() {
// Project `Box<T>` -> `Unique<T>` -> `NonNull<T>` -> `*const T`
let ptr =
cg_base.project_field(bx, 0).project_field(bx, 0).project_field(bx, 0);

bx.load_operand(ptr).deref(bx.cx())
} else {
bx.load_operand(cg_base).deref(bx.cx())
}
}
mir::ProjectionElem::Deref => bx.load_operand(cg_base).deref(bx.cx()),
mir::ProjectionElem::Field(ref field, _) => {
cg_base.project_field(bx, field.index())
}
5 changes: 5 additions & 0 deletions compiler/rustc_const_eval/src/interpret/place.rs
@@ -313,6 +313,11 @@ where
) -> InterpResult<'tcx, MPlaceTy<'tcx, M::PointerTag>> {
let val = self.read_immediate(src)?;
trace!("deref to {} on {:?}", val.layout.ty, *val);

if val.layout.ty.is_box() {
bug!("dereferencing {:?}", val.layout.ty);
}

let mplace = self.ref_to_mplace(&val)?;
self.check_mplace_access(mplace, CheckInAllocMsg::DerefTest)?;
Ok(mplace)
114 changes: 64 additions & 50 deletions compiler/rustc_const_eval/src/transform/validate.rs
@@ -240,65 +240,79 @@ impl<'a, 'tcx> Visitor<'tcx> for TypeChecker<'a, 'tcx> {
context: PlaceContext,
location: Location,
) {
if let ProjectionElem::Index(index) = elem {
let index_ty = self.body.local_decls[index].ty;
if index_ty != self.tcx.types.usize {
self.fail(location, format!("bad index ({:?} != usize)", index_ty))
match elem {
ProjectionElem::Index(index) => {
let index_ty = self.body.local_decls[index].ty;
if index_ty != self.tcx.types.usize {
self.fail(location, format!("bad index ({:?} != usize)", index_ty))
}
}
}
if let ProjectionElem::Field(f, ty) = elem {
let parent = Place { local, projection: self.tcx.intern_place_elems(proj_base) };
let parent_ty = parent.ty(&self.body.local_decls, self.tcx);
let fail_out_of_bounds = |this: &Self, location| {
this.fail(location, format!("Out of bounds field {:?} for {:?}", f, parent_ty));
};
let check_equal = |this: &Self, location, f_ty| {
if !this.mir_assign_valid_types(ty, f_ty) {
this.fail(
ProjectionElem::Deref if self.mir_phase >= MirPhase::GeneratorsLowered => {
let base_ty = Place::ty_from(local, proj_base, &self.body.local_decls, self.tcx).ty;

if base_ty.is_box() {
self.fail(
location,
format!("{:?} dereferenced after ElaborateBoxDerefs", base_ty),
)
}
}
ProjectionElem::Field(f, ty) => {
let parent = Place { local, projection: self.tcx.intern_place_elems(proj_base) };
let parent_ty = parent.ty(&self.body.local_decls, self.tcx);
let fail_out_of_bounds = |this: &Self, location| {
this.fail(location, format!("Out of bounds field {:?} for {:?}", f, parent_ty));
};
let check_equal = |this: &Self, location, f_ty| {
if !this.mir_assign_valid_types(ty, f_ty) {
this.fail(
location,
format!(
"Field projection `{:?}.{:?}` specified type `{:?}`, but actual type is {:?}",
parent, f, ty, f_ty
)
)
}
};
match parent_ty.ty.kind() {
ty::Tuple(fields) => {
let Some(f_ty) = fields.get(f.as_usize()) else {
fail_out_of_bounds(self, location);
return;
};
check_equal(self, location, *f_ty);
}
ty::Adt(adt_def, substs) => {
let var = parent_ty.variant_index.unwrap_or(VariantIdx::from_u32(0));
let Some(field) = adt_def.variant(var).fields.get(f.as_usize()) else {
fail_out_of_bounds(self, location);
return;
};
check_equal(self, location, field.ty(self.tcx, substs));
}
ty::Closure(_, substs) => {
let substs = substs.as_closure();
let Some(f_ty) = substs.upvar_tys().nth(f.as_usize()) else {
fail_out_of_bounds(self, location);
return;
};
check_equal(self, location, f_ty);
}
ty::Generator(_, substs, _) => {
let substs = substs.as_generator();
let Some(f_ty) = substs.upvar_tys().nth(f.as_usize()) else {
fail_out_of_bounds(self, location);
return;
};
check_equal(self, location, f_ty);
}
_ => {
self.fail(location, format!("{:?} does not have fields", parent_ty.ty));
}
};

match parent_ty.ty.kind() {
ty::Tuple(fields) => {
let Some(f_ty) = fields.get(f.as_usize()) else {
fail_out_of_bounds(self, location);
return;
};
check_equal(self, location, *f_ty);
}
ty::Adt(adt_def, substs) => {
let var = parent_ty.variant_index.unwrap_or(VariantIdx::from_u32(0));
let Some(field) = adt_def.variant(var).fields.get(f.as_usize()) else {
fail_out_of_bounds(self, location);
return;
};
check_equal(self, location, field.ty(self.tcx, substs));
}
ty::Closure(_, substs) => {
let substs = substs.as_closure();
let Some(f_ty) = substs.upvar_tys().nth(f.as_usize()) else {
fail_out_of_bounds(self, location);
return;
};
check_equal(self, location, f_ty);
}
ty::Generator(_, substs, _) => {
let substs = substs.as_generator();
let Some(f_ty) = substs.upvar_tys().nth(f.as_usize()) else {
fail_out_of_bounds(self, location);
return;
};
check_equal(self, location, f_ty);
}
_ => {
self.fail(location, format!("{:?} does not have fields", parent_ty.ty));
}
}
}
_ => {}
}
self.super_projection_elem(local, proj_base, elem, context, location);
}
12 changes: 12 additions & 0 deletions compiler/rustc_index/src/bit_set.rs
@@ -340,6 +340,12 @@ impl<T: Idx> BitRelations<BitSet<T>> for BitSet<T> {
}
}

impl<T: Idx> From<GrowableBitSet<T>> for BitSet<T> {
fn from(bit_set: GrowableBitSet<T>) -> Self {
bit_set.bit_set
}
}

/// A fixed-size bitset type with a partially dense, partially sparse
/// representation. The bitset is broken into chunks, and chunks that are all
/// zeros or all ones are represented and handled very efficiently.
@@ -1469,6 +1475,12 @@ impl<T: Idx> GrowableBitSet<T> {
}
}

impl<T: Idx> From<BitSet<T>> for GrowableBitSet<T> {
fn from(bit_set: BitSet<T>) -> Self {
Self { bit_set }
}
}

/// A fixed-size 2D bit matrix type with a dense representation.
///
/// `R` and `C` are index types used to identify rows and columns respectively;
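As a quick orientation for the two new From impls above, a usage sketch against the internal rustc_index API (not code from this PR): convert a fixed-size BitSet into a GrowableBitSet while new indices may still appear, then hand it back as a BitSet once the domain is settled.

    use rustc_index::bit_set::{BitSet, GrowableBitSet};

    // Fixed-size set over a domain of 8 elements.
    let mut fixed: BitSet<usize> = BitSet::new_empty(8);
    fixed.insert(3);

    // BitSet -> GrowableBitSet: can now accept indices past the original domain,
    // e.g. locals created later by a MIR transform.
    let mut growable: GrowableBitSet<usize> = fixed.into();
    growable.insert(100);

    // GrowableBitSet -> BitSet: hand the finished set back as a fixed-size set.
    let back: BitSet<usize> = growable.into();
    assert!(back.contains(3) && back.contains(100));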
1 change: 1 addition & 0 deletions compiler/rustc_middle/src/mir/mod.rs
@@ -195,6 +195,7 @@ pub enum MirPhase {
/// Beginning with this phase, the following variants are disallowed:
/// * [`TerminatorKind::Yield`](terminator::TerminatorKind::Yield)
/// * [`TerminatorKind::GeneratorDrop`](terminator::TerminatorKind::GeneratorDrop)
/// * [`ProjectionElem::Deref`] of `Box`
GeneratorsLowered = 6,
Optimized = 7,
}
13 changes: 12 additions & 1 deletion compiler/rustc_mir_dataflow/src/elaborate_drops.rs
@@ -410,7 +410,18 @@ where
fn open_drop_for_box(&mut self, adt: ty::AdtDef<'tcx>, substs: SubstsRef<'tcx>) -> BasicBlock {
debug!("open_drop_for_box({:?}, {:?}, {:?})", self, adt, substs);

let interior = self.tcx().mk_place_deref(self.place);
// drop glue is sent straight to codegen
// box cannot be directly dereferenced
let unique_ty = adt.non_enum_variant().fields[0].ty(self.tcx(), substs);
let nonnull_ty =
unique_ty.ty_adt_def().unwrap().non_enum_variant().fields[0].ty(self.tcx(), substs);
let ptr_ty = self.tcx().mk_imm_ptr(substs[0].expect_ty());

let unique_place = self.tcx().mk_place_field(self.place, Field::new(0), unique_ty);
let nonnull_place = self.tcx().mk_place_field(unique_place, Field::new(0), nonnull_ty);
let ptr_place = self.tcx().mk_place_field(nonnull_place, Field::new(0), ptr_ty);
let interior = self.tcx().mk_place_deref(ptr_place);

let interior_path = self.elaborator.deref_subpath(self.path);

let succ = self.box_free_block(adt, substs, self.succ, self.unwind);
184 changes: 184 additions & 0 deletions compiler/rustc_mir_transform/src/elaborate_box_derefs.rs
@@ -0,0 +1,184 @@
//! This pass transforms derefs of Box into a deref of the pointer inside Box.
//!
//! Box is not actually a pointer so it is incorrect to dereference it directly.
use crate::MirPass;
use rustc_hir::def_id::DefId;
use rustc_index::vec::Idx;
use rustc_middle::mir::patch::MirPatch;
use rustc_middle::mir::visit::MutVisitor;
use rustc_middle::mir::*;
use rustc_middle::ty::subst::Subst;
use rustc_middle::ty::{Ty, TyCtxt};

/// Constructs the types used when accessing a Box's pointer
pub fn build_ptr_tys<'tcx>(
tcx: TyCtxt<'tcx>,
pointee: Ty<'tcx>,
unique_did: DefId,
nonnull_did: DefId,
) -> (Ty<'tcx>, Ty<'tcx>, Ty<'tcx>) {
let substs = tcx.intern_substs(&[pointee.into()]);
let unique_ty = tcx.bound_type_of(unique_did).subst(tcx, substs);
let nonnull_ty = tcx.bound_type_of(nonnull_did).subst(tcx, substs);
let ptr_ty = tcx.mk_imm_ptr(pointee);

(unique_ty, nonnull_ty, ptr_ty)
}

// Constructs the projection needed to access a Box's pointer
pub fn build_projection<'tcx>(
unique_ty: Ty<'tcx>,
nonnull_ty: Ty<'tcx>,
ptr_ty: Ty<'tcx>,
) -> [PlaceElem<'tcx>; 3] {
[
PlaceElem::Field(Field::new(0), unique_ty),
PlaceElem::Field(Field::new(0), nonnull_ty),
PlaceElem::Field(Field::new(0), ptr_ty),
]
}

struct ElaborateBoxDerefVisitor<'tcx, 'a> {
tcx: TyCtxt<'tcx>,
unique_did: DefId,
nonnull_did: DefId,
local_decls: &'a mut LocalDecls<'tcx>,
patch: MirPatch<'tcx>,
}

impl<'tcx, 'a> MutVisitor<'tcx> for ElaborateBoxDerefVisitor<'tcx, 'a> {
fn tcx(&self) -> TyCtxt<'tcx> {
self.tcx
}

fn visit_place(
&mut self,
place: &mut Place<'tcx>,
context: visit::PlaceContext,
location: Location,
) {
let tcx = self.tcx;

let base_ty = self.local_decls[place.local].ty;

// Derefer ensures that derefs are always the first projection
if place.projection.first() == Some(&PlaceElem::Deref) && base_ty.is_box() {
let source_info = self.local_decls[place.local].source_info;

let (unique_ty, nonnull_ty, ptr_ty) =
build_ptr_tys(tcx, base_ty.boxed_ty(), self.unique_did, self.nonnull_did);

let ptr_local = self.patch.new_temp(ptr_ty, source_info.span);
self.local_decls.push(LocalDecl::new(ptr_ty, source_info.span));

self.patch.add_statement(location, StatementKind::StorageLive(ptr_local));

self.patch.add_assign(
location,
Place::from(ptr_local),
Rvalue::Use(Operand::Copy(
Place::from(place.local)
.project_deeper(&build_projection(unique_ty, nonnull_ty, ptr_ty), tcx),
)),
);

place.local = ptr_local;

self.patch.add_statement(
Location { block: location.block, statement_index: location.statement_index + 1 },
StatementKind::StorageDead(ptr_local),
);
}

self.super_place(place, context, location);
}
}

pub struct ElaborateBoxDerefs;

impl<'tcx> MirPass<'tcx> for ElaborateBoxDerefs {
fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
if let Some(def_id) = tcx.lang_items().owned_box() {
Contributor:
Not new in this PR, but we should probably start caching information like this by computing it when we create ElaborateBoxDerefs instead of fetching it on every run of the opt. Don't fix this in this PR; I'm mostly leaving this comment as a reminder for myself to figure out a way to make this convenient in the future.

let unique_did = tcx.adt_def(def_id).non_enum_variant().fields[0].did;

let Some(nonnull_def) = tcx.type_of(unique_did).ty_adt_def() else {
span_bug!(tcx.def_span(unique_did), "expected Box to contain Unique")
Contributor:
This should probably use a lang item for Unique rather than just expecting the layout to be right. Perhaps #[lang = "owned_box"] could even WF-check that the first field is Unique.

Reported as an ICE by #98372; rustc_codegen_gcc's minicore defines Box as (*mut T, A).
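For context, a minimal sketch of the shape the pass expects from a custom #[lang = "owned_box"] definition (mirroring the updated lang-items example and the ssa-analysis-regression-50041.rs test later in this PR): the Box must wrap an ADT chain ending in a raw pointer, so a flat Box<T>(*mut T, A) like the one mentioned above hits the span_bug.

    struct NonNull<T: ?Sized>(*mut T);

    struct Unique<T: ?Sized>(NonNull<T>);

    #[lang = "owned_box"]
    pub struct Box<T: ?Sized>(Unique<T>);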

};

let nonnull_did = nonnull_def.non_enum_variant().fields[0].did;

let patch = MirPatch::new(body);

let (basic_blocks, local_decls) = body.basic_blocks_and_local_decls_mut();

let mut visitor =
ElaborateBoxDerefVisitor { tcx, unique_did, nonnull_did, local_decls, patch };

for (block, BasicBlockData { statements, terminator, .. }) in
basic_blocks.iter_enumerated_mut()
{
let mut index = 0;
for statement in statements {
let location = Location { block, statement_index: index };
visitor.visit_statement(statement, location);
index += 1;
}

let location = Location { block, statement_index: index };
match terminator {
// yielding into a box is handled when lowering generators
Some(Terminator { kind: TerminatorKind::Yield { value, .. }, .. }) => {
visitor.visit_operand(value, location);
}
Some(terminator) => {
visitor.visit_terminator(terminator, location);
}
None => {}
}
}

visitor.patch.apply(body);
Contributor:
One thing that came up in the derefer PR is that derefs also happen in VarDebugInfo. We don't have a good way to avoid multiple derefs there yet, but for your PR it may be useful to also unroll Box derefs into multiple derefs.

Contributor (PR author):
What code actually generates debug info that dereferences a Box?

Contributor (PR author):
I pushed some code to do this, but I have no idea if it's actually correct because I don't know how to test it.

Contributor:
Not sure either. You could just panic on the def id to find a function that exhibits it, and then try to make a mir-opt test out of it?

Contributor:
Ooh, maybe it works via box patterns?


for debug_info in body.var_debug_info.iter_mut() {
if let VarDebugInfoContents::Place(place) = &mut debug_info.value {
let mut new_projections: Option<Vec<_>> = None;
let mut last_deref = 0;

for (i, (base, elem)) in place.iter_projections().enumerate() {
let base_ty = base.ty(&body.local_decls, tcx).ty;

if elem == PlaceElem::Deref && base_ty.is_box() {
let new_projections = new_projections.get_or_insert_default();

let (unique_ty, nonnull_ty, ptr_ty) =
build_ptr_tys(tcx, base_ty.boxed_ty(), unique_did, nonnull_did);

new_projections.extend_from_slice(&base.projection[last_deref..]);
new_projections.extend_from_slice(&build_projection(
unique_ty, nonnull_ty, ptr_ty,
));
new_projections.push(PlaceElem::Deref);

last_deref = i;
}
}

if let Some(mut new_projections) = new_projections {
new_projections.extend_from_slice(&place.projection[last_deref..]);
place.projection = tcx.intern_place_elems(&new_projections);
}
}
}
} else {
// box is not present, this pass doesn't need to do anything
}
}
}
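To make the effect of the pass concrete, a small stand-alone illustration in ordinary Rust (not part of the PR); the comments sketch the post-ElaborateBoxDerefs MIR, matching the updated mir-opt expectations further down.

    fn read_through_box(b: Box<i32>) -> i32 {
        // Before this pass, the MIR contains a direct `(*_1)` with `_1: Box<i32>`.
        // After ElaborateBoxDerefs, the deref goes through a raw-pointer temporary
        // copied out of the Box's fields, roughly:
        //     _2 = (((_1.0: std::ptr::Unique<i32>).0: std::ptr::NonNull<i32>).0: *const i32);
        //     _0 = (*_2);
        *b
    }

    fn main() {
        assert_eq!(read_through_box(Box::new(42)), 42);
    }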
94 changes: 84 additions & 10 deletions compiler/rustc_mir_transform/src/generator.rs
@@ -56,7 +56,7 @@ use crate::MirPass;
use rustc_data_structures::fx::FxHashMap;
use rustc_hir as hir;
use rustc_hir::lang_items::LangItem;
use rustc_index::bit_set::{BitMatrix, BitSet};
use rustc_index::bit_set::{BitMatrix, BitSet, GrowableBitSet};
use rustc_index::vec::{Idx, IndexVec};
use rustc_middle::mir::dump_mir;
use rustc_middle::mir::visit::{MutVisitor, PlaceContext, Visitor};
@@ -206,7 +206,7 @@ struct SuspensionPoint<'tcx> {
/// Which block to jump to if the generator is dropped in this state.
drop: Option<BasicBlock>,
/// Set of locals that have live storage while at this suspension point.
storage_liveness: BitSet<Local>,
storage_liveness: GrowableBitSet<Local>,
}

struct TransformVisitor<'tcx> {
@@ -362,7 +362,7 @@ impl<'tcx> MutVisitor<'tcx> for TransformVisitor<'tcx> {
resume,
resume_arg,
drop,
storage_liveness: self.storage_liveness[block].clone().unwrap(),
storage_liveness: self.storage_liveness[block].clone().unwrap().into(),
});

VariantIdx::new(state)
@@ -1177,6 +1177,8 @@ fn create_cases<'tcx>(
transform: &TransformVisitor<'tcx>,
operation: Operation,
) -> Vec<(usize, BasicBlock)> {
let tcx = transform.tcx;

let source_info = SourceInfo::outermost(body.span);

transform
@@ -1209,13 +1211,85 @@ fn create_cases<'tcx>(
if operation == Operation::Resume {
// Move the resume argument to the destination place of the `Yield` terminator
let resume_arg = Local::new(2); // 0 = return, 1 = self
statements.push(Statement {
source_info,
kind: StatementKind::Assign(Box::new((
point.resume_arg,
Rvalue::Use(Operand::Move(resume_arg.into())),
))),
});

// handle `box yield` properly
let box_place = if let [projection @ .., ProjectionElem::Deref] =
&**point.resume_arg.projection
{
let box_place =
Place::from(point.resume_arg.local).project_deeper(projection, tcx);

let box_ty = box_place.ty(&body.local_decls, tcx).ty;

if box_ty.is_box() { Some((box_place, box_ty)) } else { None }
} else {
None
};

if let Some((box_place, box_ty)) = box_place {
let unique_did = box_ty
.ty_adt_def()
.expect("expected Box to be an Adt")
.non_enum_variant()
.fields[0]
.did;

let Some(nonnull_def) = tcx.type_of(unique_did).ty_adt_def() else {
span_bug!(tcx.def_span(unique_did), "expected Box to contain Unique")
};

let nonnull_did = nonnull_def.non_enum_variant().fields[0].did;

let (unique_ty, nonnull_ty, ptr_ty) =
crate::elaborate_box_derefs::build_ptr_tys(
tcx,
box_ty.boxed_ty(),
unique_did,
nonnull_did,
);

let ptr_local = body.local_decls.push(LocalDecl::new(ptr_ty, body.span));

statements.push(Statement {
source_info,
kind: StatementKind::StorageLive(ptr_local),
});

statements.push(Statement {
source_info,
kind: StatementKind::Assign(Box::new((
Place::from(ptr_local),
Rvalue::Use(Operand::Copy(box_place.project_deeper(
&crate::elaborate_box_derefs::build_projection(
unique_ty, nonnull_ty, ptr_ty,
),
tcx,
))),
))),
});

statements.push(Statement {
source_info,
kind: StatementKind::Assign(Box::new((
Place::from(ptr_local)
.project_deeper(&[ProjectionElem::Deref], tcx),
Rvalue::Use(Operand::Move(resume_arg.into())),
))),
});

statements.push(Statement {
source_info,
kind: StatementKind::StorageDead(ptr_local),
});
} else {
statements.push(Statement {
source_info,
kind: StatementKind::Assign(Box::new((
point.resume_arg,
Rvalue::Use(Operand::Move(resume_arg.into())),
))),
});
}
}

// Then jump to the real target
2 changes: 2 additions & 0 deletions compiler/rustc_mir_transform/src/lib.rs
@@ -57,6 +57,7 @@ mod deref_separator;
mod dest_prop;
pub mod dump_mir;
mod early_otherwise_branch;
mod elaborate_box_derefs;
mod elaborate_drops;
mod function_item_references;
mod generator;
@@ -427,6 +428,7 @@ fn run_post_borrowck_cleanup_passes<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tc
// `AddRetag` needs to run after `ElaborateDrops`. Otherwise it should run fairly late,
// but before optimizations begin.
&deref_separator::Derefer,
&elaborate_box_derefs::ElaborateBoxDerefs,
&add_retag::AddRetag,
&lower_intrinsics::LowerIntrinsics,
&simplify::SimplifyCfg::new("elaborate-drops"),
4 changes: 3 additions & 1 deletion src/doc/unstable-book/src/language-features/lang-items.md
@@ -23,8 +23,10 @@ use core::panic::PanicInfo;
extern crate libc;
struct Unique<T>(*mut T);
#[lang = "owned_box"]
pub struct Box<T>(*mut T);
pub struct Box<T>(Unique<T>);
#[lang = "exchange_malloc"]
unsafe fn allocate(size: usize, _align: usize) -> *mut u8 {
40 changes: 20 additions & 20 deletions src/test/codegen/loads.rs
@@ -28,101 +28,101 @@ pub fn ptr_alignment_helper(x: &&()) {}
// CHECK-LABEL: @load_ref
#[no_mangle]
pub fn load_ref<'a>(x: &&'a i32) -> &'a i32 {
// CHECK: load {{i32\*|ptr}}, {{i32\*\*|ptr}} %x, align [[PTR_ALIGNMENT]], !nonnull !{{[0-9]+}}, !align ![[ALIGN_4_META:[0-9]+]], !noundef !{{[0-9]+}}
// CHECK: load {{i32\*|ptr}}, {{i32\*\*|ptr}} %x, align [[PTR_ALIGNMENT]], !nonnull !{{[0-9]+}}, !align ![[ALIGN_4_META:[0-9]+]], !noundef !{{[0-9]+}}
*x
}

// CHECK-LABEL: @load_ref_higher_alignment
#[no_mangle]
pub fn load_ref_higher_alignment<'a>(x: &&'a Align16) -> &'a Align16 {
// CHECK: load {{%Align16\*|i128\*|ptr}}, {{%Align16\*\*|i128\*\*|ptr}} %x, align [[PTR_ALIGNMENT]], !nonnull !{{[0-9]+}}, !align ![[ALIGN_16_META:[0-9]+]], !noundef !{{[0-9]+}}
// CHECK: load {{%Align16\*|i128\*|ptr}}, {{%Align16\*\*|i128\*\*|ptr}} %x, align [[PTR_ALIGNMENT]], !nonnull !{{[0-9]+}}, !align ![[ALIGN_16_META:[0-9]+]], !noundef !{{[0-9]+}}
*x
}

// CHECK-LABEL: @load_scalar_pair
#[no_mangle]
pub fn load_scalar_pair<'a>(x: &(&'a i32, &'a Align16)) -> (&'a i32, &'a Align16) {
// CHECK: load {{i32\*|ptr}}, {{i32\*\*|ptr}} %{{.+}}, align [[PTR_ALIGNMENT]], !nonnull !{{[0-9]+}}, !align ![[ALIGN_4_META]], !noundef !{{[0-9]+}}
// CHECK: load {{i64\*|ptr}}, {{i64\*\*|ptr}} %{{.+}}, align [[PTR_ALIGNMENT]], !nonnull !{{[0-9]+}}, !align ![[ALIGN_16_META]], !noundef !{{[0-9]+}}
// CHECK: load {{i32\*|ptr}}, {{i32\*\*|ptr}} %{{.+}}, align [[PTR_ALIGNMENT]], !nonnull !{{[0-9]+}}, !align ![[ALIGN_4_META]], !noundef !{{[0-9]+}}
// CHECK: load {{i64\*|ptr}}, {{i64\*\*|ptr}} %{{.+}}, align [[PTR_ALIGNMENT]], !nonnull !{{[0-9]+}}, !align ![[ALIGN_16_META]], !noundef !{{[0-9]+}}
*x
}

// CHECK-LABEL: @load_raw_pointer
#[no_mangle]
pub fn load_raw_pointer<'a>(x: &*const i32) -> *const i32 {
// loaded raw pointer should not have !nonnull, !align, or !noundef metadata
// CHECK: load {{i32\*|ptr}}, {{i32\*\*|ptr}} %x, align [[PTR_ALIGNMENT]]{{$}}
// loaded raw pointer should not have !nonnull, !align, or !noundef metadata
// CHECK: load {{i32\*|ptr}}, {{i32\*\*|ptr}} %x, align [[PTR_ALIGNMENT]]{{$}}
*x
}

// CHECK-LABEL: @load_box
#[no_mangle]
pub fn load_box<'a>(x: Box<Box<i32>>) -> Box<i32> {
// CHECK: load {{i32\*|ptr}}, {{i32\*\*|ptr}} %x, align [[PTR_ALIGNMENT]], !nonnull !{{[0-9]+}}, !align ![[ALIGN_4_META]], !noundef !{{[0-9]+}}
// CHECK: load {{i32\*|ptr}}, {{i32\*\*|ptr}} %{{.*}}, align [[PTR_ALIGNMENT]], !nonnull !{{[0-9]+}}, !align ![[ALIGN_4_META]], !noundef !{{[0-9]+}}
*x
}

// CHECK-LABEL: @load_bool
#[no_mangle]
pub fn load_bool(x: &bool) -> bool {
// CHECK: load i8, {{i8\*|ptr}} %x, align 1, !range ![[BOOL_RANGE:[0-9]+]], !noundef !{{[0-9]+}}
// CHECK: load i8, {{i8\*|ptr}} %x, align 1, !range ![[BOOL_RANGE:[0-9]+]], !noundef !{{[0-9]+}}
*x
}

// CHECK-LABEL: @load_maybeuninit_bool
#[no_mangle]
pub fn load_maybeuninit_bool(x: &MaybeUninit<bool>) -> MaybeUninit<bool> {
// CHECK: load i8, {{i8\*|ptr}} %x, align 1{{$}}
// CHECK: load i8, {{i8\*|ptr}} %x, align 1{{$}}
*x
}

// CHECK-LABEL: @load_enum_bool
#[no_mangle]
pub fn load_enum_bool(x: &MyBool) -> MyBool {
// CHECK: load i8, {{i8\*|ptr}} %x, align 1, !range ![[BOOL_RANGE]], !noundef !{{[0-9]+}}
// CHECK: load i8, {{i8\*|ptr}} %x, align 1, !range ![[BOOL_RANGE]], !noundef !{{[0-9]+}}
*x
}

// CHECK-LABEL: @load_maybeuninit_enum_bool
#[no_mangle]
pub fn load_maybeuninit_enum_bool(x: &MaybeUninit<MyBool>) -> MaybeUninit<MyBool> {
// CHECK: load i8, {{i8\*|ptr}} %x, align 1{{$}}
// CHECK: load i8, {{i8\*|ptr}} %x, align 1{{$}}
*x
}

// CHECK-LABEL: @load_int
#[no_mangle]
pub fn load_int(x: &u16) -> u16 {
// CHECK: load i16, {{i16\*|ptr}} %x, align 2{{$}}
// CHECK: load i16, {{i16\*|ptr}} %x, align 2{{$}}
*x
}

// CHECK-LABEL: @load_nonzero_int
#[no_mangle]
pub fn load_nonzero_int(x: &NonZeroU16) -> NonZeroU16 {
// CHECK: load i16, {{i16\*|ptr}} %x, align 2, !range ![[NONZEROU16_RANGE:[0-9]+]], !noundef !{{[0-9]+}}
// CHECK: load i16, {{i16\*|ptr}} %x, align 2, !range ![[NONZEROU16_RANGE:[0-9]+]], !noundef !{{[0-9]+}}
*x
}

// CHECK-LABEL: @load_option_nonzero_int
#[no_mangle]
pub fn load_option_nonzero_int(x: &Option<NonZeroU16>) -> Option<NonZeroU16> {
// CHECK: load i16, {{i16\*|ptr}} %x, align 2{{$}}
// CHECK: load i16, {{i16\*|ptr}} %x, align 2{{$}}
*x
}

// CHECK-LABEL: @borrow
#[no_mangle]
pub fn borrow(x: &i32) -> &i32 {
// CHECK: load {{i32\*|ptr}}, {{i32\*\*|ptr}} %x{{.*}}, !nonnull
// CHECK: load {{i32\*|ptr}}, {{i32\*\*|ptr}} %x{{.*}}, !nonnull
&x; // keep variable in an alloca
x
}

// CHECK-LABEL: @_box
#[no_mangle]
pub fn _box(x: Box<i32>) -> i32 {
// CHECK: load {{i32\*|ptr}}, {{i32\*\*|ptr}} %x{{.*}}, !nonnull
// CHECK: load {{i32\*|ptr}}, {{i32\*\*|ptr}} %x{{.*}}, align [[PTR_ALIGNMENT]]
*x
}

@@ -131,8 +131,8 @@ pub fn _box(x: Box<i32>) -> i32 {
// dependent alignment
#[no_mangle]
pub fn small_array_alignment(x: [i8; 4]) -> [i8; 4] {
// CHECK: [[VAR:%[0-9]+]] = load i32, {{i32\*|ptr}} %{{.*}}, align 1
// CHECK: ret i32 [[VAR]]
// CHECK: [[VAR:%[0-9]+]] = load i32, {{i32\*|ptr}} %{{.*}}, align 1
// CHECK: ret i32 [[VAR]]
x
}

@@ -141,8 +141,8 @@ pub fn small_array_alignment(x: [i8; 4]) -> [i8; 4] {
// dependent alignment
#[no_mangle]
pub fn small_struct_alignment(x: Bytes) -> Bytes {
// CHECK: [[VAR:%[0-9]+]] = load i32, {{i32\*|ptr}} %{{.*}}, align 1
// CHECK: ret i32 [[VAR]]
// CHECK: [[VAR:%[0-9]+]] = load i32, {{i32\*|ptr}} %{{.*}}, align 1
// CHECK: ret i32 [[VAR]]
x
}

14 changes: 12 additions & 2 deletions src/test/mir-opt/const_prop/boxes.main.ConstProp.diff
@@ -10,6 +10,10 @@
let mut _5: usize; // in scope 0 at $DIR/boxes.rs:12:14: 12:22
let mut _6: *mut u8; // in scope 0 at $DIR/boxes.rs:12:14: 12:22
let mut _7: std::boxed::Box<i32>; // in scope 0 at $DIR/boxes.rs:12:14: 12:22
let mut _8: *const i32; // in scope 0 at $DIR/boxes.rs:12:14: 12:22
let mut _9: *const i32; // in scope 0 at $DIR/boxes.rs:12:14: 12:22
let mut _10: *const i32; // in scope 0 at $DIR/boxes.rs:12:14: 12:22
let mut _11: *const i32; // in scope 0 at $DIR/boxes.rs:12:14: 12:22
scope 1 {
debug x => _1; // in scope 1 at $DIR/boxes.rs:12:9: 12:10
}
@@ -34,10 +38,16 @@
bb1: {
StorageLive(_7); // scope 0 at $DIR/boxes.rs:12:14: 12:22
_7 = ShallowInitBox(move _6, i32); // scope 0 at $DIR/boxes.rs:12:14: 12:22
(*_7) = const 42_i32; // scope 0 at $DIR/boxes.rs:12:19: 12:21
StorageLive(_8); // scope 0 at $DIR/boxes.rs:12:19: 12:21
_8 = (((_7.0: std::ptr::Unique<i32>).0: std::ptr::NonNull<i32>).0: *const i32); // scope 0 at $DIR/boxes.rs:12:19: 12:21
(*_8) = const 42_i32; // scope 0 at $DIR/boxes.rs:12:19: 12:21
StorageDead(_8); // scope 0 at $DIR/boxes.rs:12:14: 12:22
_3 = move _7; // scope 0 at $DIR/boxes.rs:12:14: 12:22
StorageDead(_7); // scope 0 at $DIR/boxes.rs:12:21: 12:22
_2 = (*_3); // scope 0 at $DIR/boxes.rs:12:13: 12:22
StorageLive(_9); // scope 0 at $DIR/boxes.rs:12:13: 12:22
_9 = (((_3.0: std::ptr::Unique<i32>).0: std::ptr::NonNull<i32>).0: *const i32); // scope 0 at $DIR/boxes.rs:12:13: 12:22
_2 = (*_9); // scope 0 at $DIR/boxes.rs:12:13: 12:22
StorageDead(_9); // scope 0 at $DIR/boxes.rs:12:13: 12:26
_1 = Add(move _2, const 0_i32); // scope 0 at $DIR/boxes.rs:12:13: 12:26
StorageDead(_2); // scope 0 at $DIR/boxes.rs:12:25: 12:26
drop(_3) -> [return: bb2, unwind: bb3]; // scope 0 at $DIR/boxes.rs:12:26: 12:27
Original file line number Diff line number Diff line change
@@ -9,14 +9,16 @@
let mut _4: *mut u8; // in scope 0 at $DIR/inline-into-box-place.rs:8:29: 8:43
let mut _5: std::boxed::Box<std::vec::Vec<u32>>; // in scope 0 at $DIR/inline-into-box-place.rs:8:29: 8:43
let mut _6: (); // in scope 0 at $DIR/inline-into-box-place.rs:8:42: 8:43
+ let mut _7: &mut std::vec::Vec<u32>; // in scope 0 at $DIR/inline-into-box-place.rs:8:33: 8:43
let mut _7: *const std::vec::Vec<u32>; // in scope 0 at $DIR/inline-into-box-place.rs:8:29: 8:43
let mut _8: *const std::vec::Vec<u32>; // in scope 0 at $DIR/inline-into-box-place.rs:8:29: 8:43
+ let mut _9: &mut std::vec::Vec<u32>; // in scope 0 at $DIR/inline-into-box-place.rs:8:33: 8:43
scope 1 {
debug _x => _1; // in scope 1 at $DIR/inline-into-box-place.rs:8:9: 8:11
}
scope 2 {
}
+ scope 3 (inlined Vec::<u32>::new) { // at $DIR/inline-into-box-place.rs:8:33: 8:43
+ let mut _8: alloc::raw_vec::RawVec<u32>; // in scope 3 at $SRC_DIR/alloc/src/vec/mod.rs:LL:COL
+ let mut _10: alloc::raw_vec::RawVec<u32>; // in scope 3 at $SRC_DIR/alloc/src/vec/mod.rs:LL:COL
+ }

bb0: {
@@ -32,11 +34,13 @@
bb1: {
StorageLive(_5); // scope 0 at $DIR/inline-into-box-place.rs:8:29: 8:43
_5 = ShallowInitBox(move _4, std::vec::Vec<u32>); // scope 0 at $DIR/inline-into-box-place.rs:8:29: 8:43
- (*_5) = Vec::<u32>::new() -> [return: bb2, unwind: bb5]; // scope 0 at $DIR/inline-into-box-place.rs:8:33: 8:43
+ StorageLive(_7); // scope 0 at $DIR/inline-into-box-place.rs:8:33: 8:43
+ _7 = &mut (*_5); // scope 0 at $DIR/inline-into-box-place.rs:8:33: 8:43
+ StorageLive(_8); // scope 3 at $SRC_DIR/alloc/src/vec/mod.rs:LL:COL
+ _8 = const alloc::raw_vec::RawVec::<u32>::NEW; // scope 3 at $SRC_DIR/alloc/src/vec/mod.rs:LL:COL
StorageLive(_7); // scope 0 at $DIR/inline-into-box-place.rs:8:33: 8:43
_7 = (((_5.0: std::ptr::Unique<std::vec::Vec<u32>>).0: std::ptr::NonNull<std::vec::Vec<u32>>).0: *const std::vec::Vec<u32>); // scope 0 at $DIR/inline-into-box-place.rs:8:33: 8:43
- (*_7) = Vec::<u32>::new() -> [return: bb2, unwind: bb4]; // scope 0 at $DIR/inline-into-box-place.rs:8:33: 8:43
+ StorageLive(_9); // scope 0 at $DIR/inline-into-box-place.rs:8:33: 8:43
+ _9 = &mut (*_7); // scope 0 at $DIR/inline-into-box-place.rs:8:33: 8:43
+ StorageLive(_10); // scope 3 at $SRC_DIR/alloc/src/vec/mod.rs:LL:COL
+ _10 = const alloc::raw_vec::RawVec::<u32>::NEW; // scope 3 at $SRC_DIR/alloc/src/vec/mod.rs:LL:COL
// mir::Constant
- // + span: $DIR/inline-into-box-place.rs:8:33: 8:41
- // + user_ty: UserType(1)
@@ -47,15 +51,16 @@
+ // + span: $SRC_DIR/alloc/src/vec/mod.rs:LL:COL
+ // + user_ty: UserType(0)
+ // + literal: Const { ty: alloc::raw_vec::RawVec<u32>, val: Unevaluated(alloc::raw_vec::RawVec::<T>::NEW, [u32], None) }
+ Deinit((*_7)); // scope 3 at $SRC_DIR/alloc/src/vec/mod.rs:LL:COL
+ ((*_7).0: alloc::raw_vec::RawVec<u32>) = move _8; // scope 3 at $SRC_DIR/alloc/src/vec/mod.rs:LL:COL
+ ((*_7).1: usize) = const 0_usize; // scope 3 at $SRC_DIR/alloc/src/vec/mod.rs:LL:COL
+ StorageDead(_8); // scope 3 at $SRC_DIR/alloc/src/vec/mod.rs:LL:COL
+ StorageDead(_7); // scope 0 at $DIR/inline-into-box-place.rs:8:33: 8:43
+ Deinit((*_9)); // scope 3 at $SRC_DIR/alloc/src/vec/mod.rs:LL:COL
+ ((*_9).0: alloc::raw_vec::RawVec<u32>) = move _10; // scope 3 at $SRC_DIR/alloc/src/vec/mod.rs:LL:COL
+ ((*_9).1: usize) = const 0_usize; // scope 3 at $SRC_DIR/alloc/src/vec/mod.rs:LL:COL
+ StorageDead(_10); // scope 3 at $SRC_DIR/alloc/src/vec/mod.rs:LL:COL
+ StorageDead(_9); // scope 0 at $DIR/inline-into-box-place.rs:8:33: 8:43
StorageDead(_7); // scope 0 at $DIR/inline-into-box-place.rs:8:33: 8:43
_1 = move _5; // scope 0 at $DIR/inline-into-box-place.rs:8:29: 8:43
StorageDead(_5); // scope 0 at $DIR/inline-into-box-place.rs:8:42: 8:43
_0 = const (); // scope 0 at $DIR/inline-into-box-place.rs:7:11: 9:2
- drop(_1) -> [return: bb3, unwind: bb4]; // scope 0 at $DIR/inline-into-box-place.rs:9:1: 9:2
- drop(_1) -> [return: bb3, unwind: bb5]; // scope 0 at $DIR/inline-into-box-place.rs:9:1: 9:2
+ drop(_1) -> [return: bb2, unwind: bb3]; // scope 0 at $DIR/inline-into-box-place.rs:9:1: 9:2
}

@@ -66,15 +71,16 @@
}

- bb4 (cleanup): {
+ bb3 (cleanup): {
resume; // scope 0 at $DIR/inline-into-box-place.rs:7:1: 9:2
- }
-
- bb5 (cleanup): {
- _6 = alloc::alloc::box_free::<Vec<u32>, std::alloc::Global>(move (_5.0: std::ptr::Unique<std::vec::Vec<u32>>), move (_5.1: std::alloc::Global)) -> bb4; // scope 0 at $DIR/inline-into-box-place.rs:8:42: 8:43
- StorageDead(_7); // scope 0 at $DIR/inline-into-box-place.rs:8:33: 8:43
- _6 = alloc::alloc::box_free::<Vec<u32>, std::alloc::Global>(move (_5.0: std::ptr::Unique<std::vec::Vec<u32>>), move (_5.1: std::alloc::Global)) -> bb5; // scope 0 at $DIR/inline-into-box-place.rs:8:42: 8:43
- // mir::Constant
- // + span: $DIR/inline-into-box-place.rs:8:42: 8:43
- // + literal: Const { ty: unsafe fn(Unique<Vec<u32>>, std::alloc::Global) {alloc::alloc::box_free::<Vec<u32>, std::alloc::Global>}, val: Value(Scalar(<ZST>)) }
- }
-
- bb5 (cleanup): {
+ bb3 (cleanup): {
resume; // scope 0 at $DIR/inline-into-box-place.rs:7:1: 9:2
}
}

Original file line number Diff line number Diff line change
@@ -9,14 +9,16 @@
let mut _4: *mut u8; // in scope 0 at $DIR/inline-into-box-place.rs:8:29: 8:43
let mut _5: std::boxed::Box<std::vec::Vec<u32>>; // in scope 0 at $DIR/inline-into-box-place.rs:8:29: 8:43
let mut _6: (); // in scope 0 at $DIR/inline-into-box-place.rs:8:42: 8:43
+ let mut _7: &mut std::vec::Vec<u32>; // in scope 0 at $DIR/inline-into-box-place.rs:8:33: 8:43
let mut _7: *const std::vec::Vec<u32>; // in scope 0 at $DIR/inline-into-box-place.rs:8:29: 8:43
let mut _8: *const std::vec::Vec<u32>; // in scope 0 at $DIR/inline-into-box-place.rs:8:29: 8:43
+ let mut _9: &mut std::vec::Vec<u32>; // in scope 0 at $DIR/inline-into-box-place.rs:8:33: 8:43
scope 1 {
debug _x => _1; // in scope 1 at $DIR/inline-into-box-place.rs:8:9: 8:11
}
scope 2 {
}
+ scope 3 (inlined Vec::<u32>::new) { // at $DIR/inline-into-box-place.rs:8:33: 8:43
+ let mut _8: alloc::raw_vec::RawVec<u32>; // in scope 3 at $SRC_DIR/alloc/src/vec/mod.rs:LL:COL
+ let mut _10: alloc::raw_vec::RawVec<u32>; // in scope 3 at $SRC_DIR/alloc/src/vec/mod.rs:LL:COL
+ }

bb0: {
@@ -32,11 +34,13 @@
bb1: {
StorageLive(_5); // scope 0 at $DIR/inline-into-box-place.rs:8:29: 8:43
_5 = ShallowInitBox(move _4, std::vec::Vec<u32>); // scope 0 at $DIR/inline-into-box-place.rs:8:29: 8:43
- (*_5) = Vec::<u32>::new() -> [return: bb2, unwind: bb5]; // scope 0 at $DIR/inline-into-box-place.rs:8:33: 8:43
+ StorageLive(_7); // scope 0 at $DIR/inline-into-box-place.rs:8:33: 8:43
+ _7 = &mut (*_5); // scope 0 at $DIR/inline-into-box-place.rs:8:33: 8:43
+ StorageLive(_8); // scope 3 at $SRC_DIR/alloc/src/vec/mod.rs:LL:COL
+ _8 = const alloc::raw_vec::RawVec::<u32>::NEW; // scope 3 at $SRC_DIR/alloc/src/vec/mod.rs:LL:COL
StorageLive(_7); // scope 0 at $DIR/inline-into-box-place.rs:8:33: 8:43
_7 = (((_5.0: std::ptr::Unique<std::vec::Vec<u32>>).0: std::ptr::NonNull<std::vec::Vec<u32>>).0: *const std::vec::Vec<u32>); // scope 0 at $DIR/inline-into-box-place.rs:8:33: 8:43
- (*_7) = Vec::<u32>::new() -> [return: bb2, unwind: bb4]; // scope 0 at $DIR/inline-into-box-place.rs:8:33: 8:43
+ StorageLive(_9); // scope 0 at $DIR/inline-into-box-place.rs:8:33: 8:43
+ _9 = &mut (*_7); // scope 0 at $DIR/inline-into-box-place.rs:8:33: 8:43
+ StorageLive(_10); // scope 3 at $SRC_DIR/alloc/src/vec/mod.rs:LL:COL
+ _10 = const alloc::raw_vec::RawVec::<u32>::NEW; // scope 3 at $SRC_DIR/alloc/src/vec/mod.rs:LL:COL
// mir::Constant
- // + span: $DIR/inline-into-box-place.rs:8:33: 8:41
- // + user_ty: UserType(1)
@@ -47,15 +51,16 @@
+ // + span: $SRC_DIR/alloc/src/vec/mod.rs:LL:COL
+ // + user_ty: UserType(0)
+ // + literal: Const { ty: alloc::raw_vec::RawVec<u32>, val: Unevaluated(alloc::raw_vec::RawVec::<T>::NEW, [u32], None) }
+ Deinit((*_7)); // scope 3 at $SRC_DIR/alloc/src/vec/mod.rs:LL:COL
+ ((*_7).0: alloc::raw_vec::RawVec<u32>) = move _8; // scope 3 at $SRC_DIR/alloc/src/vec/mod.rs:LL:COL
+ ((*_7).1: usize) = const 0_usize; // scope 3 at $SRC_DIR/alloc/src/vec/mod.rs:LL:COL
+ StorageDead(_8); // scope 3 at $SRC_DIR/alloc/src/vec/mod.rs:LL:COL
+ StorageDead(_7); // scope 0 at $DIR/inline-into-box-place.rs:8:33: 8:43
+ Deinit((*_9)); // scope 3 at $SRC_DIR/alloc/src/vec/mod.rs:LL:COL
+ ((*_9).0: alloc::raw_vec::RawVec<u32>) = move _10; // scope 3 at $SRC_DIR/alloc/src/vec/mod.rs:LL:COL
+ ((*_9).1: usize) = const 0_usize; // scope 3 at $SRC_DIR/alloc/src/vec/mod.rs:LL:COL
+ StorageDead(_10); // scope 3 at $SRC_DIR/alloc/src/vec/mod.rs:LL:COL
+ StorageDead(_9); // scope 0 at $DIR/inline-into-box-place.rs:8:33: 8:43
StorageDead(_7); // scope 0 at $DIR/inline-into-box-place.rs:8:33: 8:43
_1 = move _5; // scope 0 at $DIR/inline-into-box-place.rs:8:29: 8:43
StorageDead(_5); // scope 0 at $DIR/inline-into-box-place.rs:8:42: 8:43
_0 = const (); // scope 0 at $DIR/inline-into-box-place.rs:7:11: 9:2
- drop(_1) -> [return: bb3, unwind: bb4]; // scope 0 at $DIR/inline-into-box-place.rs:9:1: 9:2
- drop(_1) -> [return: bb3, unwind: bb5]; // scope 0 at $DIR/inline-into-box-place.rs:9:1: 9:2
+ drop(_1) -> [return: bb2, unwind: bb3]; // scope 0 at $DIR/inline-into-box-place.rs:9:1: 9:2
}

@@ -66,15 +71,16 @@
}

- bb4 (cleanup): {
+ bb3 (cleanup): {
resume; // scope 0 at $DIR/inline-into-box-place.rs:7:1: 9:2
- }
-
- bb5 (cleanup): {
- _6 = alloc::alloc::box_free::<Vec<u32>, std::alloc::Global>(move (_5.0: std::ptr::Unique<std::vec::Vec<u32>>), move (_5.1: std::alloc::Global)) -> bb4; // scope 0 at $DIR/inline-into-box-place.rs:8:42: 8:43
- StorageDead(_7); // scope 0 at $DIR/inline-into-box-place.rs:8:33: 8:43
- _6 = alloc::alloc::box_free::<Vec<u32>, std::alloc::Global>(move (_5.0: std::ptr::Unique<std::vec::Vec<u32>>), move (_5.1: std::alloc::Global)) -> bb5; // scope 0 at $DIR/inline-into-box-place.rs:8:42: 8:43
- // mir::Constant
- // + span: $DIR/inline-into-box-place.rs:8:42: 8:43
- // + literal: Const { ty: unsafe fn(Unique<Vec<u32>>, std::alloc::Global) {alloc::alloc::box_free::<Vec<u32>, std::alloc::Global>}, val: Value(Scalar(<ZST>)) }
- }
-
- bb5 (cleanup): {
+ bb3 (cleanup): {
resume; // scope 0 at $DIR/inline-into-box-place.rs:7:1: 9:2
}
}

Original file line number Diff line number Diff line change
@@ -11,6 +11,7 @@ fn b(_1: &mut Box<T>) -> &mut T {
let mut _5: &mut T; // in scope 1 at $SRC_DIR/alloc/src/boxed.rs:LL:COL
let mut _6: &mut T; // in scope 1 at $SRC_DIR/alloc/src/boxed.rs:LL:COL
let mut _7: std::boxed::Box<T>; // in scope 1 at $SRC_DIR/alloc/src/boxed.rs:LL:COL
let mut _8: *const T; // in scope 1 at $SRC_DIR/alloc/src/boxed.rs:LL:COL
}

bb0: {
@@ -22,7 +23,10 @@ fn b(_1: &mut Box<T>) -> &mut T {
StorageLive(_6); // scope 1 at $SRC_DIR/alloc/src/boxed.rs:LL:COL
StorageLive(_7); // scope 1 at $SRC_DIR/alloc/src/boxed.rs:LL:COL
_7 = move (*_4); // scope 1 at $SRC_DIR/alloc/src/boxed.rs:LL:COL
_6 = &mut (*_7); // scope 1 at $SRC_DIR/alloc/src/boxed.rs:LL:COL
StorageLive(_8); // scope 1 at $SRC_DIR/alloc/src/boxed.rs:LL:COL
_8 = (((_7.0: std::ptr::Unique<T>).0: std::ptr::NonNull<T>).0: *const T); // scope 1 at $SRC_DIR/alloc/src/boxed.rs:LL:COL
_6 = &mut (*_8); // scope 1 at $SRC_DIR/alloc/src/boxed.rs:LL:COL
StorageDead(_8); // scope 1 at $SRC_DIR/alloc/src/boxed.rs:LL:COL
StorageDead(_7); // scope 1 at $SRC_DIR/alloc/src/boxed.rs:LL:COL
_5 = &mut (*_6); // scope 1 at $SRC_DIR/alloc/src/boxed.rs:LL:COL
_3 = &mut (*_5); // scope 1 at $SRC_DIR/alloc/src/boxed.rs:LL:COL
Original file line number Diff line number Diff line change
@@ -8,6 +8,7 @@ fn d(_1: &Box<T>) -> &T {
scope 1 (inlined <Box<T> as AsRef<T>>::as_ref) { // at $DIR/issue-58867-inline-as-ref-as-mut.rs:18:5: 18:15
debug self => _3; // in scope 1 at $SRC_DIR/alloc/src/boxed.rs:LL:COL
let mut _4: std::boxed::Box<T>; // in scope 1 at $SRC_DIR/alloc/src/boxed.rs:LL:COL
let mut _5: *const T; // in scope 1 at $SRC_DIR/alloc/src/boxed.rs:LL:COL
}

bb0: {
@@ -16,7 +17,10 @@ fn d(_1: &Box<T>) -> &T {
_3 = &(*_1); // scope 0 at $DIR/issue-58867-inline-as-ref-as-mut.rs:18:5: 18:15
StorageLive(_4); // scope 1 at $SRC_DIR/alloc/src/boxed.rs:LL:COL
_4 = move (*_3); // scope 1 at $SRC_DIR/alloc/src/boxed.rs:LL:COL
_2 = &(*_4); // scope 1 at $SRC_DIR/alloc/src/boxed.rs:LL:COL
StorageLive(_5); // scope 1 at $SRC_DIR/alloc/src/boxed.rs:LL:COL
_5 = (((_4.0: std::ptr::Unique<T>).0: std::ptr::NonNull<T>).0: *const T); // scope 1 at $SRC_DIR/alloc/src/boxed.rs:LL:COL
_2 = &(*_5); // scope 1 at $SRC_DIR/alloc/src/boxed.rs:LL:COL
StorageDead(_5); // scope 1 at $SRC_DIR/alloc/src/boxed.rs:LL:COL
StorageDead(_4); // scope 1 at $SRC_DIR/alloc/src/boxed.rs:LL:COL
_0 = &(*_2); // scope 0 at $DIR/issue-58867-inline-as-ref-as-mut.rs:18:5: 18:15
StorageDead(_3); // scope 0 at $DIR/issue-58867-inline-as-ref-as-mut.rs:18:14: 18:15
18 changes: 10 additions & 8 deletions src/test/ui/mir/ssa-analysis-regression-50041.rs
@@ -1,27 +1,29 @@
// build-pass
// compile-flags: -Z mir-opt-level=4

#![crate_type="lib"]
#![crate_type = "lib"]
#![feature(lang_items)]
#![no_std]

struct NonNull<T: ?Sized>(*mut T);

struct Unique<T: ?Sized>(NonNull<T>);

#[lang = "owned_box"]
pub struct Box<T: ?Sized>(*mut T, ());
pub struct Box<T: ?Sized>(Unique<T>);

impl<T: ?Sized> Drop for Box<T> {
fn drop(&mut self) {
}
fn drop(&mut self) {}
}

#[lang = "box_free"]
#[inline(always)]
unsafe fn box_free<T: ?Sized>(ptr: *mut T, _: ()) {
dealloc(ptr)
unsafe fn box_free<T: ?Sized>(ptr: Unique<T>) {
dealloc(ptr.0.0)
}

#[inline(never)]
fn dealloc<T: ?Sized>(_: *mut T) {
}
fn dealloc<T: ?Sized>(_: *mut T) {}

pub struct Foo<T>(T);