Rewrite the Visitor for non_ssa_locals #73854

Closed
270 changes: 115 additions & 155 deletions
src/librustc_codegen_ssa/mir/analyze.rs
@@ -5,40 +5,32 @@ use super::FunctionCx;
use crate::traits::*;
use rustc_data_structures::graph::dominators::Dominators;
use rustc_index::bit_set::BitSet;
use rustc_index::vec::{Idx, IndexVec};
use rustc_index::vec::IndexVec;
use rustc_middle::mir::traversal;
use rustc_middle::mir::visit::{
MutatingUseContext, NonMutatingUseContext, NonUseContext, PlaceContext, Visitor,
};
use rustc_middle::mir::visit::{MutatingUseContext, NonMutatingUseContext, PlaceContext, Visitor};
use rustc_middle::mir::{self, Location, TerminatorKind};
use rustc_middle::ty;
use rustc_middle::ty::layout::HasTyCtxt;
use rustc_middle::ty::layout::{HasTyCtxt, TyAndLayout};
use rustc_target::abi::LayoutOf;

pub fn non_ssa_locals<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
fx: &FunctionCx<'a, 'tcx, Bx>,
) -> BitSet<mir::Local> {
trace!("non_ssa_locals({:?})", fx.instance.def_id());

let mir = fx.mir;
let mut analyzer = LocalAnalyzer::new(fx);

analyzer.visit_body(&mir);
for (block, data) in traversal::reverse_postorder(mir) {
analyzer.visit_basic_block_data(block, data);
}

for (local, decl) in mir.local_decls.iter_enumerated() {
let ty = fx.monomorphize(&decl.ty);
debug!("local {:?} has type `{}`", local, ty);
let layout = fx.cx.spanned_layout_of(ty, decl.source_info.span);
if fx.cx.is_backend_immediate(layout) {
// These sorts of types are immediates that we can store
// in a Value without an alloca.
} else if fx.cx.is_backend_scalar_pair(layout) {
// We allow pairs and uses of any of their 2 fields.
} else {
// These sorts of types require an alloca. Note that
// is_llvm_immediate() may *still* be true, particularly
// for newtypes, but we currently force some types
// (e.g., structs) into an alloca unconditionally, just so
// that we don't have to deal with having two pathways
// (gep vs extractvalue etc).

if ty_requires_alloca(&analyzer.fx, layout) {
analyzer.not_ssa(local);
}
Comment on lines +33 to 35
Maybe we should do this first, and even keep the monomorphized TyAndLayouts around, to make it cheaper to check if "consume" field projections can be handled in an SSA way.

Actually, huh, we have access to FunctionCx, I'm tempted to say we should create the FunctionCx with all of the local TyAndLayouts already computed (although we couldn't have placeholder LocalRefs easily I don't think?) - but maybe that's too much for this PR?
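
A rough sketch of that caching idea (hypothetical, not part of this PR; it only reuses the `monomorphize` and `spanned_layout_of` calls already visible in this diff):

```rust
// Hypothetical cache: compute each local's monomorphized layout once, up
// front, so per-projection checks can index into it instead of re-querying.
fn compute_local_layouts<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
    fx: &FunctionCx<'a, 'tcx, Bx>,
) -> IndexVec<mir::Local, TyAndLayout<'tcx>> {
    fx.mir
        .local_decls
        .iter()
        .map(|decl| {
            let ty = fx.monomorphize(&decl.ty);
            fx.cx.spanned_layout_of(ty, decl.source_info.span)
        })
        .collect()
}
```

The analyzer (or a pre-populated `FunctionCx`) could then hold this `IndexVec` and consult it from `visit_place` instead of calling `spanned_layout_of` for every projection prefix.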

}
@@ -50,161 +42,39 @@ struct LocalAnalyzer<'mir, 'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> {
fx: &'mir FunctionCx<'a, 'tcx, Bx>,
dominators: Dominators<mir::BasicBlock>,
non_ssa_locals: BitSet<mir::Local>,
// The location of the first visited direct assignment to each
// local, or an invalid location (out of bounds `block` index).
first_assignment: IndexVec<mir::Local, Location>,

/// The location of the first visited direct assignment to each local.
first_assignment: IndexVec<mir::Local, Option<Location>>,
}

impl<'mir, 'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> LocalAnalyzer<'mir, 'a, 'tcx, Bx> {
fn new(fx: &'mir FunctionCx<'a, 'tcx, Bx>) -> Self {
let invalid_location = mir::BasicBlock::new(fx.mir.basic_blocks().len()).start_location();
let dominators = fx.mir.dominators();
let mut analyzer = LocalAnalyzer {
fx,
dominators,
non_ssa_locals: BitSet::new_empty(fx.mir.local_decls.len()),
first_assignment: IndexVec::from_elem(invalid_location, &fx.mir.local_decls),
first_assignment: IndexVec::from_elem(None, &fx.mir.local_decls),
};

// Arguments get assigned to by means of the function being called
for arg in fx.mir.args_iter() {
analyzer.first_assignment[arg] = mir::START_BLOCK.start_location();
analyzer.assign(arg, mir::START_BLOCK.start_location());
}

analyzer
}

fn first_assignment(&self, local: mir::Local) -> Option<Location> {
let location = self.first_assignment[local];
if location.block.index() < self.fx.mir.basic_blocks().len() {
Some(location)
} else {
None
}
}

fn not_ssa(&mut self, local: mir::Local) {
debug!("marking {:?} as non-SSA", local);
self.non_ssa_locals.insert(local);
}

fn assign(&mut self, local: mir::Local, location: Location) {
if self.first_assignment(local).is_some() {
if self.first_assignment[local].is_some() {
self.not_ssa(local);
} else {
self.first_assignment[local] = location;
}
}

fn process_place(
&mut self,
place_ref: &mir::PlaceRef<'tcx>,
context: PlaceContext,
location: Location,
) {
let cx = self.fx.cx;

if let &[ref proj_base @ .., elem] = place_ref.projection {
let mut base_context = if context.is_mutating_use() {
PlaceContext::MutatingUse(MutatingUseContext::Projection)
} else {
PlaceContext::NonMutatingUse(NonMutatingUseContext::Projection)
};

// Allow uses of projections that are ZSTs or from scalar fields.
let is_consume = match context {
PlaceContext::NonMutatingUse(
NonMutatingUseContext::Copy | NonMutatingUseContext::Move,
) => true,
_ => false,
};
if is_consume {
let base_ty =
mir::Place::ty_from(place_ref.local, proj_base, self.fx.mir, cx.tcx());
let base_ty = self.fx.monomorphize(&base_ty);

// ZSTs don't require any actual memory access.
let elem_ty = base_ty.projection_ty(cx.tcx(), self.fx.monomorphize(&elem)).ty;
let span = self.fx.mir.local_decls[place_ref.local].source_info.span;
if cx.spanned_layout_of(elem_ty, span).is_zst() {
return;
}

if let mir::ProjectionElem::Field(..) = elem {
let layout = cx.spanned_layout_of(base_ty.ty, span);
if cx.is_backend_immediate(layout) || cx.is_backend_scalar_pair(layout) {
// Recurse with the same context, instead of `Projection`,
// potentially stopping at non-operand projections,
// which would trigger `not_ssa` on locals.
base_context = context;
}
}
}

if let mir::ProjectionElem::Deref = elem {
// Deref projections typically only read the pointer.
// (the exception being `VarDebugInfo` contexts, handled below)
base_context = PlaceContext::NonMutatingUse(NonMutatingUseContext::Copy);

// Indirect debuginfo requires going through memory, that only
// the debugger accesses, following our emitted DWARF pointer ops.
//
// FIXME(eddyb) Investigate the possibility of relaxing this, but
// note that `llvm.dbg.declare` *must* be used for indirect places,
// even if we start using `llvm.dbg.value` for all other cases,
// as we don't necessarily know when the value changes, but only
// where it lives in memory.
//
// It's possible `llvm.dbg.declare` could support starting from
// a pointer that doesn't point to an `alloca`, but this would
// only be useful if we know the pointer being `Deref`'d comes
// from an immutable place, and if `llvm.dbg.declare` calls
// must be at the very start of the function, then only function
// arguments could contain such pointers.
if context == PlaceContext::NonUse(NonUseContext::VarDebugInfo) {
// We use `NonUseContext::VarDebugInfo` for the base,
// which might not force the base local to memory,
// so we have to do it manually.
self.visit_local(&place_ref.local, context, location);
}
}

// `NonUseContext::VarDebugInfo` needs to flow all the
// way down to the base local (see `visit_local`).
if context == PlaceContext::NonUse(NonUseContext::VarDebugInfo) {
base_context = context;
}

self.process_place(
&mir::PlaceRef { local: place_ref.local, projection: proj_base },
base_context,
location,
);
// HACK(eddyb) this emulates the old `visit_projection_elem`, this
// entire `visit_place`-like `process_place` method should be rewritten,
// now that we have moved to the "slice of projections" representation.
if let mir::ProjectionElem::Index(local) = elem {
self.visit_local(
&local,
PlaceContext::NonMutatingUse(NonMutatingUseContext::Copy),
location,
);
}
} else {
// FIXME this is super_place code, is repeated here to avoid cloning place or changing
// visit_place API
let mut context = context;

if !place_ref.projection.is_empty() {
context = if context.is_mutating_use() {
PlaceContext::MutatingUse(MutatingUseContext::Projection)
} else {
PlaceContext::NonMutatingUse(NonMutatingUseContext::Projection)
};
}

self.visit_local(&place_ref.local, context, location);
self.visit_projection(place_ref.local, place_ref.projection, context, location);
self.first_assignment[local] = Some(location);
}
}
}
@@ -261,9 +131,81 @@ impl<'mir, 'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> Visitor<'tcx>
self.super_terminator(terminator, location);
}

fn visit_place(&mut self, place: &mir::Place<'tcx>, context: PlaceContext, location: Location) {
debug!("visit_place(place={:?}, context={:?})", place, context);
self.process_place(&place.as_ref(), context, location);
fn visit_place(
&mut self,
place: &mir::Place<'tcx>,
mut context: PlaceContext,
location: Location,
) {
let mir::Place { local, projection } = *place;

self.super_projection(local, projection, context, location);

// Non-uses do not force locals onto the stack.
if !context.is_use() {
return;
}

let is_consume = is_consume(context);

// Reads from ZSTs do not require memory accesses and do not count when determining what
// needs to live on the stack.
if is_consume {
let ty = place.ty(self.fx.mir, self.fx.cx.tcx()).ty;
let ty = self.fx.monomorphize(&ty);
let span = self.fx.mir.local_decls[local].source_info.span;
if self.fx.cx.spanned_layout_of(ty, span).is_zst() {
return;
}
}

let is_indirect = place.is_indirect();
if is_indirect {
context = PlaceContext::NonMutatingUse(NonMutatingUseContext::Copy);
}

self.visit_local(&local, context, location);

// In any context besides a simple read or pointer deref, any projections whatsoever force
// a value onto the stack.
if !is_consume && !is_indirect {
if !projection.is_empty() {
self.not_ssa(local);
}

return;
}

// Only projections that come before the first `Deref`, i.e. those applied to the
// local's own value rather than to pointed-to memory, can disqualify the local from
// SSA treatment. In other words, `(*x)[idx]` does not disqualify `x` but `*(x[idx])` does.
let first_deref = projection.iter().position(|elem| matches!(elem, mir::PlaceElem::Deref));
let projections_on_base_local = &projection[..first_deref.unwrap_or(projection.len())];

// Only field projections are allowed. We check this before checking the layout of each
// projection below since computing layouts is relatively expensive.
if !projections_on_base_local.iter().all(|elem| matches!(elem, mir::PlaceElem::Field(..))) {
self.not_ssa(local);
return;
}

// Ensure that each field being projected through is handled correctly.
for (i, elem) in projections_on_base_local.iter().enumerate() {
assert!(matches!(elem, mir::PlaceElem::Field(..)));

// The inclusive range here means we check every projection prefix but the empty one.
// This is okay since the type of each local is checked in `non_ssa_locals`.
let base = &projection[..=i];

let base_ty = mir::Place::ty_from(local, base, self.fx.mir, self.fx.cx.tcx());
let base_ty = self.fx.monomorphize(&base_ty);
let span = self.fx.mir.local_decls[local].source_info.span;
let layout = self.fx.cx.spanned_layout_of(base_ty.ty, span);

if ty_requires_alloca(self.fx, layout) {
self.not_ssa(local);
return;
}
}
}

fn visit_local(&mut self, &local: &mir::Local, context: PlaceContext, location: Location) {
@@ -282,7 +224,7 @@ impl<'mir, 'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> Visitor<'tcx>
// optimizations) require locals to be in (uninitialized) memory.
// N.B., there can be uninitialized reads of a local visited after
// an assignment to that local, if they happen on disjoint paths.
let ssa_read = match self.first_assignment(local) {
let ssa_read = match self.first_assignment[local] {
Some(assignment_location) => {
assignment_location.dominates(location, &self.dominators)
}
@@ -293,20 +235,23 @@ }
}
}

PlaceContext::MutatingUse(MutatingUseContext::Projection)
| PlaceContext::NonMutatingUse(NonMutatingUseContext::Projection) => {
unreachable!("We always use the original context from `visit_place`")
}

PlaceContext::MutatingUse(
MutatingUseContext::Store
| MutatingUseContext::AsmOutput
| MutatingUseContext::Borrow
| MutatingUseContext::AddressOf
| MutatingUseContext::Projection,
| MutatingUseContext::AddressOf,
)
| PlaceContext::NonMutatingUse(
NonMutatingUseContext::Inspect
| NonMutatingUseContext::SharedBorrow
| NonMutatingUseContext::UniqueBorrow
| NonMutatingUseContext::ShallowBorrow
| NonMutatingUseContext::AddressOf
| NonMutatingUseContext::Projection,
| NonMutatingUseContext::AddressOf,
) => {
self.not_ssa(local);
}
Expand Down Expand Up @@ -446,3 +391,18 @@ pub fn cleanup_kinds(mir: &mir::Body<'_>) -> IndexVec<mir::BasicBlock, CleanupKi
debug!("cleanup_kinds: result={:?}", result);
result
}

/// Returns `true` if locals of this type need to be allocated on the stack.
fn ty_requires_alloca<'a, 'tcx>(
fx: &FunctionCx<'a, 'tcx, impl BuilderMethods<'a, 'tcx>>,
ty: TyAndLayout<'tcx>,
) -> bool {
!fx.cx.is_backend_immediate(ty) && !fx.cx.is_backend_scalar_pair(ty)
}

fn is_consume(context: PlaceContext) -> bool {
matches!(
context,
PlaceContext::NonMutatingUse(NonMutatingUseContext::Copy | NonMutatingUseContext::Move)
)
}