From c054186ec70ae2393330886cb8dc506aab21750c Mon Sep 17 00:00:00 2001 From: Dylan MacKenzie Date: Wed, 12 Jun 2019 15:00:43 -0700 Subject: [PATCH 1/2] rustc_mir: don't pass `on_entry` when building transfer functions. This commit makes `sets.on_entry` inaccessible in `{before_,}{statement,terminator}_effect`. This field was meant to allow implementors of `BitDenotation` to access the initial state for each block (optionally with the effect of all previous statements applied via `accumulates_intrablock_state`) while defining transfer functions. However, the ability to set the initial value for the entry set of each basic block (except for START_BLOCK) no longer exists. As a result, this functionality is mostly useless, and when it *was* used it was used erroneously (see #62007). Since `on_entry` is now useless, we can also remove `BlockSets`, which held the `gen`, `kill`, and `on_entry` bitvectors and replace it with a `GenKill` struct. Variables of this type are called `trans` since they represent a transfer function. `GenKill`s are stored contiguously in `AllSets`, which reduces the number of bounds checks and may improve cache performance: one is almost never accessed without the other. Replacing `BlockSets` with `GenKill` allows us to define some new helper functions which streamline dataflow iteration and the dataflow-at-location APIs. Notably, `state_for_location` used a subtle side-effect of the `kill`/`kill_all` setters to apply the transfer function, and could be incorrect if a transfer function depended on effects of previous statements in the block on `gen_set`. --- src/librustc_mir/dataflow/at_location.rs | 77 ++---- src/librustc_mir/dataflow/graphviz.rs | 15 +- .../dataflow/impls/borrowed_locals.rs | 24 +- src/librustc_mir/dataflow/impls/borrows.rs | 50 ++-- src/librustc_mir/dataflow/impls/mod.rs | 57 ++-- .../dataflow/impls/storage_liveness.rs | 10 +- src/librustc_mir/dataflow/mod.rs | 260 ++++++++---------- src/librustc_mir/transform/elaborate_drops.rs | 6 +- src/librustc_mir/transform/rustc_peek.rs | 26 +- 9 files changed, 229 insertions(+), 296 deletions(-) diff --git a/src/librustc_mir/dataflow/at_location.rs b/src/librustc_mir/dataflow/at_location.rs index 9cba34b425350..7735528d8f8e0 100644 --- a/src/librustc_mir/dataflow/at_location.rs +++ b/src/librustc_mir/dataflow/at_location.rs @@ -4,7 +4,7 @@ use rustc::mir::{BasicBlock, Location}; use rustc_data_structures::bit_set::{BitIter, BitSet, HybridBitSet}; -use crate::dataflow::{BitDenotation, BlockSets, DataflowResults}; +use crate::dataflow::{BitDenotation, DataflowResults, GenKillSet}; use crate::dataflow::move_paths::{HasMoveData, MovePathIndex}; use std::iter; @@ -66,8 +66,7 @@ where { base_results: DataflowResults<'tcx, BD>, curr_state: BitSet, - stmt_gen: HybridBitSet, - stmt_kill: HybridBitSet, + stmt_trans: GenKillSet, } impl<'tcx, BD> FlowAtLocation<'tcx, BD> @@ -89,19 +88,17 @@ where where F: FnMut(BD::Idx), { - self.stmt_gen.iter().for_each(f) + self.stmt_trans.gen_set.iter().for_each(f) } pub fn new(results: DataflowResults<'tcx, BD>) -> Self { let bits_per_block = results.sets().bits_per_block(); let curr_state = BitSet::new_empty(bits_per_block); - let stmt_gen = HybridBitSet::new_empty(bits_per_block); - let stmt_kill = HybridBitSet::new_empty(bits_per_block); + let stmt_trans = GenKillSet::from_elem(HybridBitSet::new_empty(bits_per_block)); FlowAtLocation { base_results: results, - curr_state: curr_state, - stmt_gen: stmt_gen, - stmt_kill: stmt_kill, + curr_state, + stmt_trans, } } @@ -127,8 +124,7 @@ 
where F: FnOnce(BitIter<'_, BD::Idx>), { let mut curr_state = self.curr_state.clone(); - curr_state.union(&self.stmt_gen); - curr_state.subtract(&self.stmt_kill); + self.stmt_trans.apply(&mut curr_state); f(curr_state.iter()); } @@ -142,68 +138,41 @@ impl<'tcx, BD> FlowsAtLocation for FlowAtLocation<'tcx, BD> where BD: BitDenotation<'tcx> { fn reset_to_entry_of(&mut self, bb: BasicBlock) { - self.curr_state.overwrite(self.base_results.sets().on_entry_set_for(bb.index())); + self.curr_state.overwrite(self.base_results.sets().entry_set_for(bb.index())); } fn reset_to_exit_of(&mut self, bb: BasicBlock) { self.reset_to_entry_of(bb); - self.curr_state.union(self.base_results.sets().gen_set_for(bb.index())); - self.curr_state.subtract(self.base_results.sets().kill_set_for(bb.index())); + let trans = self.base_results.sets().trans_for(bb.index()); + trans.apply(&mut self.curr_state) } fn reconstruct_statement_effect(&mut self, loc: Location) { - self.stmt_gen.clear(); - self.stmt_kill.clear(); - { - let mut sets = BlockSets { - on_entry: &mut self.curr_state, - gen_set: &mut self.stmt_gen, - kill_set: &mut self.stmt_kill, - }; - self.base_results - .operator() - .before_statement_effect(&mut sets, loc); - } - self.apply_local_effect(loc); + self.stmt_trans.clear(); + self.base_results + .operator() + .before_statement_effect(&mut self.stmt_trans, loc); + self.stmt_trans.apply(&mut self.curr_state); - let mut sets = BlockSets { - on_entry: &mut self.curr_state, - gen_set: &mut self.stmt_gen, - kill_set: &mut self.stmt_kill, - }; self.base_results .operator() - .statement_effect(&mut sets, loc); + .statement_effect(&mut self.stmt_trans, loc); } fn reconstruct_terminator_effect(&mut self, loc: Location) { - self.stmt_gen.clear(); - self.stmt_kill.clear(); - { - let mut sets = BlockSets { - on_entry: &mut self.curr_state, - gen_set: &mut self.stmt_gen, - kill_set: &mut self.stmt_kill, - }; - self.base_results - .operator() - .before_terminator_effect(&mut sets, loc); - } - self.apply_local_effect(loc); + self.stmt_trans.clear(); + self.base_results + .operator() + .before_terminator_effect(&mut self.stmt_trans, loc); + self.stmt_trans.apply(&mut self.curr_state); - let mut sets = BlockSets { - on_entry: &mut self.curr_state, - gen_set: &mut self.stmt_gen, - kill_set: &mut self.stmt_kill, - }; self.base_results .operator() - .terminator_effect(&mut sets, loc); + .terminator_effect(&mut self.stmt_trans, loc); } fn apply_local_effect(&mut self, _loc: Location) { - self.curr_state.union(&self.stmt_gen); - self.curr_state.subtract(&self.stmt_kill); + self.stmt_trans.apply(&mut self.curr_state) } } diff --git a/src/librustc_mir/dataflow/graphviz.rs b/src/librustc_mir/dataflow/graphviz.rs index f62ad2fbef71f..e2c3c984cab5f 100644 --- a/src/librustc_mir/dataflow/graphviz.rs +++ b/src/librustc_mir/dataflow/graphviz.rs @@ -170,7 +170,7 @@ where MWF: MirWithFlowState<'tcx>, write!(w, "")?; // Entry - dump_set_for!(on_entry_set_for, interpret_set); + dump_set_for!(entry_set_for, interpret_set); // MIR statements write!(w, "")?; @@ -208,7 +208,7 @@ where MWF: MirWithFlowState<'tcx>, write!(w, "")?; // Entry - let set = flow.sets.on_entry_set_for(i); + let set = flow.sets.entry_set_for(i); write!(w, "{:?}", dot::escape_html(&set.to_string()))?; // Terminator @@ -221,13 +221,10 @@ where MWF: MirWithFlowState<'tcx>, } write!(w, "")?; - // Gen - let set = flow.sets.gen_set_for(i); - write!(w, "{:?}", dot::escape_html(&format!("{:?}", set)))?; - - // Kill - let set = flow.sets.kill_set_for(i); - write!(w, "{:?}", 
dot::escape_html(&format!("{:?}", set)))?; + // Gen/Kill + let trans = flow.sets.trans_for(i); + write!(w, "{:?}", dot::escape_html(&format!("{:?}", trans.gen_set)))?; + write!(w, "{:?}", dot::escape_html(&format!("{:?}", trans.kill_set)))?; write!(w, "")?; diff --git a/src/librustc_mir/dataflow/impls/borrowed_locals.rs b/src/librustc_mir/dataflow/impls/borrowed_locals.rs index 069ce3a78c5f9..87ee254fb74bc 100644 --- a/src/librustc_mir/dataflow/impls/borrowed_locals.rs +++ b/src/librustc_mir/dataflow/impls/borrowed_locals.rs @@ -2,7 +2,7 @@ pub use super::*; use rustc::mir::*; use rustc::mir::visit::Visitor; -use crate::dataflow::BitDenotation; +use crate::dataflow::{BitDenotation, GenKillSet}; /// This calculates if any part of a MIR local could have previously been borrowed. /// This means that once a local has been borrowed, its bit will be set @@ -33,39 +33,39 @@ impl<'a, 'tcx> BitDenotation<'tcx> for HaveBeenBorrowedLocals<'a, 'tcx> { self.body.local_decls.len() } - fn start_block_effect(&self, _sets: &mut BitSet) { + fn start_block_effect(&self, _on_entry: &mut BitSet) { // Nothing is borrowed on function entry } fn statement_effect(&self, - sets: &mut BlockSets<'_, Local>, + trans: &mut GenKillSet, loc: Location) { let stmt = &self.body[loc.block].statements[loc.statement_index]; BorrowedLocalsVisitor { - sets, + trans, }.visit_statement(stmt, loc); // StorageDead invalidates all borrows and raw pointers to a local match stmt.kind { - StatementKind::StorageDead(l) => sets.kill(l), + StatementKind::StorageDead(l) => trans.kill(l), _ => (), } } fn terminator_effect(&self, - sets: &mut BlockSets<'_, Local>, + trans: &mut GenKillSet, loc: Location) { let terminator = self.body[loc.block].terminator(); BorrowedLocalsVisitor { - sets, + trans, }.visit_terminator(terminator, loc); match &terminator.kind { // Drop terminators borrows the location TerminatorKind::Drop { location, .. } | TerminatorKind::DropAndReplace { location, .. 
} => { if let Some(local) = find_local(location) { - sets.gen(local); + trans.gen(local); } } _ => (), @@ -97,8 +97,8 @@ impl<'a, 'tcx> InitialFlow for HaveBeenBorrowedLocals<'a, 'tcx> { } } -struct BorrowedLocalsVisitor<'b, 'c> { - sets: &'b mut BlockSets<'c, Local>, +struct BorrowedLocalsVisitor<'gk> { + trans: &'gk mut GenKillSet, } fn find_local<'tcx>(place: &Place<'tcx>) -> Option { @@ -117,13 +117,13 @@ fn find_local<'tcx>(place: &Place<'tcx>) -> Option { }) } -impl<'tcx, 'b, 'c> Visitor<'tcx> for BorrowedLocalsVisitor<'b, 'c> { +impl<'tcx> Visitor<'tcx> for BorrowedLocalsVisitor<'_> { fn visit_rvalue(&mut self, rvalue: &Rvalue<'tcx>, location: Location) { if let Rvalue::Ref(_, _, ref place) = *rvalue { if let Some(local) = find_local(place) { - self.sets.gen(local); + self.trans.gen(local); } } diff --git a/src/librustc_mir/dataflow/impls/borrows.rs b/src/librustc_mir/dataflow/impls/borrows.rs index 7617d3b997d08..b29a4f351b58b 100644 --- a/src/librustc_mir/dataflow/impls/borrows.rs +++ b/src/librustc_mir/dataflow/impls/borrows.rs @@ -9,7 +9,7 @@ use rustc_data_structures::bit_set::{BitSet, BitSetOperator}; use rustc_data_structures::fx::FxHashMap; use rustc_data_structures::indexed_vec::{Idx, IndexVec}; -use crate::dataflow::{BitDenotation, BlockSets, InitialFlow}; +use crate::dataflow::{BitDenotation, InitialFlow, GenKillSet}; use crate::borrow_check::nll::region_infer::RegionInferenceContext; use crate::borrow_check::nll::ToRegionVid; use crate::borrow_check::places_conflict; @@ -168,7 +168,7 @@ impl<'a, 'tcx> Borrows<'a, 'tcx> { /// Add all borrows to the kill set, if those borrows are out of scope at `location`. /// That means they went out of a nonlexical scope fn kill_loans_out_of_scope_at_location(&self, - sets: &mut BlockSets<'_, BorrowIndex>, + trans: &mut GenKillSet, location: Location) { // NOTE: The state associated with a given `location` // reflects the dataflow on entry to the statement. @@ -182,14 +182,14 @@ impl<'a, 'tcx> Borrows<'a, 'tcx> { // region, then setting that gen-bit will override any // potential kill introduced here. if let Some(indices) = self.borrows_out_of_scope_at_location.get(&location) { - sets.kill_all(indices); + trans.kill_all(indices); } } /// Kill any borrows that conflict with `place`. fn kill_borrows_on_place( &self, - sets: &mut BlockSets<'_, BorrowIndex>, + trans: &mut GenKillSet, place: &Place<'tcx> ) { debug!("kill_borrows_on_place: place={:?}", place); @@ -206,7 +206,7 @@ impl<'a, 'tcx> Borrows<'a, 'tcx> { // local must conflict. This is purely an optimization so we don't have to call // `places_conflict` for every borrow. if let Place::Base(PlaceBase::Local(_)) = place { - sets.kill_all(other_borrows_of_local); + trans.kill_all(other_borrows_of_local); return; } @@ -224,7 +224,7 @@ impl<'a, 'tcx> Borrows<'a, 'tcx> { places_conflict::PlaceConflictBias::NoOverlap) }); - sets.kill_all(definitely_conflicting_borrows); + trans.kill_all(definitely_conflicting_borrows); } } } @@ -236,21 +236,24 @@ impl<'a, 'tcx> BitDenotation<'tcx> for Borrows<'a, 'tcx> { self.borrow_set.borrows.len() * 2 } - fn start_block_effect(&self, _entry_set: &mut BitSet) { + fn start_block_effect(&self, _entry_set: &mut BitSet) { // no borrows of code region_scopes have been taken prior to - // function execution, so this method has no effect on - // `_sets`. + // function execution, so this method has no effect. 
} fn before_statement_effect(&self, - sets: &mut BlockSets<'_, BorrowIndex>, + trans: &mut GenKillSet, location: Location) { - debug!("Borrows::before_statement_effect sets: {:?} location: {:?}", sets, location); - self.kill_loans_out_of_scope_at_location(sets, location); + debug!("Borrows::before_statement_effect trans: {:?} location: {:?}", + trans, location); + self.kill_loans_out_of_scope_at_location(trans, location); } - fn statement_effect(&self, sets: &mut BlockSets<'_, BorrowIndex>, location: Location) { - debug!("Borrows::statement_effect: sets={:?} location={:?}", sets, location); + fn statement_effect(&self, + trans: &mut GenKillSet, + location: Location) { + debug!("Borrows::statement_effect: trans={:?} location={:?}", + trans, location); let block = &self.body.basic_blocks().get(location.block).unwrap_or_else(|| { panic!("could not find block at location {:?}", location); @@ -264,7 +267,7 @@ impl<'a, 'tcx> BitDenotation<'tcx> for Borrows<'a, 'tcx> { mir::StatementKind::Assign(ref lhs, ref rhs) => { // Make sure there are no remaining borrows for variables // that are assigned over. - self.kill_borrows_on_place(sets, lhs); + self.kill_borrows_on_place(trans, lhs); if let mir::Rvalue::Ref(_, _, ref place) = **rhs { if place.ignore_borrow( @@ -278,20 +281,20 @@ impl<'a, 'tcx> BitDenotation<'tcx> for Borrows<'a, 'tcx> { panic!("could not find BorrowIndex for location {:?}", location); }); - sets.gen(*index); + trans.gen(*index); } } mir::StatementKind::StorageDead(local) => { // Make sure there are no remaining borrows for locals that // are gone out of scope. - self.kill_borrows_on_place(sets, &Place::Base(PlaceBase::Local(local))); + self.kill_borrows_on_place(trans, &Place::Base(PlaceBase::Local(local))); } mir::StatementKind::InlineAsm(ref asm) => { for (output, kind) in asm.outputs.iter().zip(&asm.asm.outputs) { if !kind.is_indirect && !kind.is_rw { - self.kill_borrows_on_place(sets, output); + self.kill_borrows_on_place(trans, output); } } } @@ -307,13 +310,16 @@ impl<'a, 'tcx> BitDenotation<'tcx> for Borrows<'a, 'tcx> { } fn before_terminator_effect(&self, - sets: &mut BlockSets<'_, BorrowIndex>, + trans: &mut GenKillSet, location: Location) { - debug!("Borrows::before_terminator_effect sets: {:?} location: {:?}", sets, location); - self.kill_loans_out_of_scope_at_location(sets, location); + debug!("Borrows::before_terminator_effect: trans={:?} location={:?}", + trans, location); + self.kill_loans_out_of_scope_at_location(trans, location); } - fn terminator_effect(&self, _: &mut BlockSets<'_, BorrowIndex>, _: Location) {} + fn terminator_effect(&self, + _: &mut GenKillSet, + _: Location) {} fn propagate_call_return( &self, diff --git a/src/librustc_mir/dataflow/impls/mod.rs b/src/librustc_mir/dataflow/impls/mod.rs index 0a572d2093622..ad92c8783d36c 100644 --- a/src/librustc_mir/dataflow/impls/mod.rs +++ b/src/librustc_mir/dataflow/impls/mod.rs @@ -12,7 +12,7 @@ use super::MoveDataParamEnv; use crate::util::elaborate_drops::DropFlagState; use super::move_paths::{HasMoveData, MoveData, MovePathIndex, InitIndex, InitKind}; -use super::{BitDenotation, BlockSets, InitialFlow}; +use super::{BitDenotation, InitialFlow, GenKillSet}; use super::drop_flag_effects_for_function_entry; use super::drop_flag_effects_for_location; @@ -226,34 +226,37 @@ impl<'a, 'tcx> HasMoveData<'tcx> for EverInitializedPlaces<'a, 'tcx> { } impl<'a, 'tcx> MaybeInitializedPlaces<'a, 'tcx> { - fn update_bits(sets: &mut BlockSets<'_, MovePathIndex>, path: MovePathIndex, + fn update_bits(trans: &mut 
GenKillSet, + path: MovePathIndex, state: DropFlagState) { match state { - DropFlagState::Absent => sets.kill(path), - DropFlagState::Present => sets.gen(path), + DropFlagState::Absent => trans.kill(path), + DropFlagState::Present => trans.gen(path), } } } impl<'a, 'tcx> MaybeUninitializedPlaces<'a, 'tcx> { - fn update_bits(sets: &mut BlockSets<'_, MovePathIndex>, path: MovePathIndex, + fn update_bits(trans: &mut GenKillSet, + path: MovePathIndex, state: DropFlagState) { match state { - DropFlagState::Absent => sets.gen(path), - DropFlagState::Present => sets.kill(path), + DropFlagState::Absent => trans.gen(path), + DropFlagState::Present => trans.kill(path), } } } impl<'a, 'tcx> DefinitelyInitializedPlaces<'a, 'tcx> { - fn update_bits(sets: &mut BlockSets<'_, MovePathIndex>, path: MovePathIndex, + fn update_bits(trans: &mut GenKillSet, + path: MovePathIndex, state: DropFlagState) { match state { - DropFlagState::Absent => sets.kill(path), - DropFlagState::Present => sets.gen(path), + DropFlagState::Absent => trans.kill(path), + DropFlagState::Present => trans.gen(path), } } } @@ -275,24 +278,24 @@ impl<'a, 'tcx> BitDenotation<'tcx> for MaybeInitializedPlaces<'a, 'tcx> { } fn statement_effect(&self, - sets: &mut BlockSets<'_, MovePathIndex>, + trans: &mut GenKillSet, location: Location) { drop_flag_effects_for_location( self.tcx, self.body, self.mdpe, location, - |path, s| Self::update_bits(sets, path, s) + |path, s| Self::update_bits(trans, path, s) ) } fn terminator_effect(&self, - sets: &mut BlockSets<'_, MovePathIndex>, + trans: &mut GenKillSet, location: Location) { drop_flag_effects_for_location( self.tcx, self.body, self.mdpe, location, - |path, s| Self::update_bits(sets, path, s) + |path, s| Self::update_bits(trans, path, s) ) } @@ -333,24 +336,24 @@ impl<'a, 'tcx> BitDenotation<'tcx> for MaybeUninitializedPlaces<'a, 'tcx> { } fn statement_effect(&self, - sets: &mut BlockSets<'_, MovePathIndex>, + trans: &mut GenKillSet, location: Location) { drop_flag_effects_for_location( self.tcx, self.body, self.mdpe, location, - |path, s| Self::update_bits(sets, path, s) + |path, s| Self::update_bits(trans, path, s) ) } fn terminator_effect(&self, - sets: &mut BlockSets<'_, MovePathIndex>, + trans: &mut GenKillSet, location: Location) { drop_flag_effects_for_location( self.tcx, self.body, self.mdpe, location, - |path, s| Self::update_bits(sets, path, s) + |path, s| Self::update_bits(trans, path, s) ) } @@ -389,24 +392,24 @@ impl<'a, 'tcx> BitDenotation<'tcx> for DefinitelyInitializedPlaces<'a, 'tcx> { } fn statement_effect(&self, - sets: &mut BlockSets<'_, MovePathIndex>, + trans: &mut GenKillSet, location: Location) { drop_flag_effects_for_location( self.tcx, self.body, self.mdpe, location, - |path, s| Self::update_bits(sets, path, s) + |path, s| Self::update_bits(trans, path, s) ) } fn terminator_effect(&self, - sets: &mut BlockSets<'_, MovePathIndex>, + trans: &mut GenKillSet, location: Location) { drop_flag_effects_for_location( self.tcx, self.body, self.mdpe, location, - |path, s| Self::update_bits(sets, path, s) + |path, s| Self::update_bits(trans, path, s) ) } @@ -439,7 +442,7 @@ impl<'a, 'tcx> BitDenotation<'tcx> for EverInitializedPlaces<'a, 'tcx> { } fn statement_effect(&self, - sets: &mut BlockSets<'_, InitIndex>, + trans: &mut GenKillSet, location: Location) { let (_, body, move_data) = (self.tcx, self.body, self.move_data()); let stmt = &body[location.block].statements[location.statement_index]; @@ -449,7 +452,7 @@ impl<'a, 'tcx> BitDenotation<'tcx> for EverInitializedPlaces<'a, 
'tcx> { debug!("statement {:?} at loc {:?} initializes move_indexes {:?}", stmt, location, &init_loc_map[location]); - sets.gen_all(&init_loc_map[location]); + trans.gen_all(&init_loc_map[location]); match stmt.kind { mir::StatementKind::StorageDead(local) => { @@ -458,14 +461,14 @@ impl<'a, 'tcx> BitDenotation<'tcx> for EverInitializedPlaces<'a, 'tcx> { let move_path_index = rev_lookup.find_local(local); debug!("stmt {:?} at loc {:?} clears the ever initialized status of {:?}", stmt, location, &init_path_map[move_path_index]); - sets.kill_all(&init_path_map[move_path_index]); + trans.kill_all(&init_path_map[move_path_index]); } _ => {} } } fn terminator_effect(&self, - sets: &mut BlockSets<'_, InitIndex>, + trans: &mut GenKillSet, location: Location) { let (body, move_data) = (self.body, self.move_data()); @@ -473,7 +476,7 @@ impl<'a, 'tcx> BitDenotation<'tcx> for EverInitializedPlaces<'a, 'tcx> { let init_loc_map = &move_data.init_loc_map; debug!("terminator {:?} at loc {:?} initializes move_indexes {:?}", term, location, &init_loc_map[location]); - sets.gen_all( + trans.gen_all( init_loc_map[location].iter().filter(|init_index| { move_data.inits[**init_index].kind != InitKind::NonPanicPathOnly }) diff --git a/src/librustc_mir/dataflow/impls/storage_liveness.rs b/src/librustc_mir/dataflow/impls/storage_liveness.rs index d7575b0f441e6..a717a3947e5cf 100644 --- a/src/librustc_mir/dataflow/impls/storage_liveness.rs +++ b/src/librustc_mir/dataflow/impls/storage_liveness.rs @@ -26,24 +26,24 @@ impl<'a, 'tcx> BitDenotation<'tcx> for MaybeStorageLive<'a, 'tcx> { self.body.local_decls.len() } - fn start_block_effect(&self, _sets: &mut BitSet) { + fn start_block_effect(&self, _on_entry: &mut BitSet) { // Nothing is live on function entry } fn statement_effect(&self, - sets: &mut BlockSets<'_, Local>, + trans: &mut GenKillSet, loc: Location) { let stmt = &self.body[loc.block].statements[loc.statement_index]; match stmt.kind { - StatementKind::StorageLive(l) => sets.gen(l), - StatementKind::StorageDead(l) => sets.kill(l), + StatementKind::StorageLive(l) => trans.gen(l), + StatementKind::StorageDead(l) => trans.kill(l), _ => (), } } fn terminator_effect(&self, - _sets: &mut BlockSets<'_, Local>, + _trans: &mut GenKillSet, _loc: Location) { // Terminators have no effect } diff --git a/src/librustc_mir/dataflow/mod.rs b/src/librustc_mir/dataflow/mod.rs index 0728d5b21bbd1..d4a0ec78c5b80 100644 --- a/src/librustc_mir/dataflow/mod.rs +++ b/src/librustc_mir/dataflow/mod.rs @@ -131,7 +131,7 @@ pub(crate) fn do_dataflow<'a, 'tcx, BD, P>( p: P, ) -> DataflowResults<'tcx, BD> where - BD: BitDenotation<'tcx> + InitialFlow, + BD: BitDenotation<'tcx>, P: Fn(&BD, BD::Idx) -> DebugFormatted, { let flow_state = DataflowAnalysis::new(body, dead_unwinds, bd); @@ -199,42 +199,27 @@ where } fn build_sets(&mut self) { - // First we need to build the entry-, gen- and kill-sets. - - { - let sets = &mut self.flow_state.sets.for_block(mir::START_BLOCK.index()); - self.flow_state.operator.start_block_effect(&mut sets.on_entry); - } - + // Build the transfer function for each block. 
for (bb, data) in self.body.basic_blocks().iter_enumerated() { let &mir::BasicBlockData { ref statements, ref terminator, is_cleanup: _ } = data; - let mut interim_state; - let sets = &mut self.flow_state.sets.for_block(bb.index()); - let track_intrablock = BD::accumulates_intrablock_state(); - if track_intrablock { - debug!("swapping in mutable on_entry, initially {:?}", sets.on_entry); - interim_state = sets.on_entry.to_owned(); - sets.on_entry = &mut interim_state; - } + let trans = self.flow_state.sets.trans_mut_for(bb.index()); for j_stmt in 0..statements.len() { let location = Location { block: bb, statement_index: j_stmt }; - self.flow_state.operator.before_statement_effect(sets, location); - self.flow_state.operator.statement_effect(sets, location); - if track_intrablock { - sets.apply_local_effect(); - } + self.flow_state.operator.before_statement_effect(trans, location); + self.flow_state.operator.statement_effect(trans, location); } if terminator.is_some() { let location = Location { block: bb, statement_index: statements.len() }; - self.flow_state.operator.before_terminator_effect(sets, location); - self.flow_state.operator.terminator_effect(sets, location); - if track_intrablock { - sets.apply_local_effect(); - } + self.flow_state.operator.before_terminator_effect(trans, location); + self.flow_state.operator.terminator_effect(trans, location); } } + + // Initialize the flow state at entry to the start block. + let on_entry = self.flow_state.sets.entry_set_mut_for(mir::START_BLOCK.index()); + self.flow_state.operator.start_block_effect(on_entry); } } @@ -247,14 +232,12 @@ where WorkQueue::with_all(self.builder.body.basic_blocks().len()); let body = self.builder.body; while let Some(bb) = dirty_queue.pop() { + let (on_entry, trans) = self.builder.flow_state.sets.get_mut(bb.index()); + debug_assert!(in_out.words().len() == on_entry.words().len()); + in_out.overwrite(on_entry); + trans.apply(in_out); + let bb_data = &body[bb]; - { - let sets = self.builder.flow_state.sets.for_block(bb.index()); - debug_assert!(in_out.words().len() == sets.on_entry.words().len()); - in_out.overwrite(sets.on_entry); - in_out.union(sets.gen_set); - in_out.subtract(sets.kill_set); - } self.builder.propagate_bits_into_graph_successors_of( in_out, (bb, bb_data), &mut dirty_queue); } @@ -366,33 +349,27 @@ pub fn state_for_location<'tcx, T: BitDenotation<'tcx>>(loc: Location, result: &DataflowResults<'tcx, T>, body: &Body<'tcx>) -> BitSet { - let mut on_entry = result.sets().on_entry_set_for(loc.block.index()).to_owned(); - let mut kill_set = on_entry.to_hybrid(); - let mut gen_set = kill_set.clone(); - - { - let mut sets = BlockSets { - on_entry: &mut on_entry, - kill_set: &mut kill_set, - gen_set: &mut gen_set, - }; + let mut trans = GenKill::from_elem(HybridBitSet::new_empty(analysis.bits_per_block())); - for stmt in 0..loc.statement_index { - let mut stmt_loc = loc; - stmt_loc.statement_index = stmt; - analysis.before_statement_effect(&mut sets, stmt_loc); - analysis.statement_effect(&mut sets, stmt_loc); - } + for stmt in 0..loc.statement_index { + let mut stmt_loc = loc; + stmt_loc.statement_index = stmt; + analysis.before_statement_effect(&mut trans, stmt_loc); + analysis.statement_effect(&mut trans, stmt_loc); + } - // Apply the pre-statement effect of the statement we're evaluating. 
- if loc.statement_index == body[loc.block].statements.len() { - analysis.before_terminator_effect(&mut sets, loc); - } else { - analysis.before_statement_effect(&mut sets, loc); - } + // Apply the pre-statement effect of the statement we're evaluating. + if loc.statement_index == body[loc.block].statements.len() { + analysis.before_terminator_effect(&mut trans, loc); + } else { + analysis.before_statement_effect(&mut trans, loc); } - gen_set.to_dense() + // Apply the transfer function for all preceding statements to the fixpoint + // at the start of the block. + let mut state = result.sets().entry_set_for(loc.block.index()).to_owned(); + trans.apply(&mut state); + state } pub struct DataflowAnalysis<'a, 'tcx, O> @@ -462,36 +439,8 @@ impl<'tcx, O: BitDenotation<'tcx>> DataflowState<'tcx, O> { } } -#[derive(Debug)] -pub struct AllSets { - /// Analysis bitwidth for each block. - bits_per_block: usize, - - /// For each block, bits valid on entry to the block. - on_entry_sets: Vec>, - - /// For each block, bits generated by executing the statements + - /// terminator in the block -- with one caveat. In particular, for - /// *call terminators*, the effect of storing the destination is - /// not included, since that only takes effect on the **success** - /// edge (and not the unwind edge). - gen_sets: Vec>, - - /// For each block, bits killed by executing the statements + - /// terminator in the block -- with one caveat. In particular, for - /// *call terminators*, the effect of storing the destination is - /// not included, since that only takes effect on the **success** - /// edge (and not the unwind edge). - kill_sets: Vec>, -} - -/// Triple of sets associated with a given block. -/// -/// Generally, one sets up `on_entry`, `gen_set`, and `kill_set` for -/// each block individually, and then runs the dataflow analysis which -/// iteratively modifies the various `on_entry` sets (but leaves the -/// other two sets unchanged, since they represent the effect of the -/// block, which should be invariant over the course of the analysis). +/// A 2-tuple representing the "gen" and "kill" bitsets during +/// dataflow analysis. /// /// It is best to ensure that the intersection of `gen_set` and /// `kill_set` is empty; otherwise the results of the dataflow will @@ -499,21 +448,32 @@ pub struct AllSets { /// killed during the iteration. (This is such a good idea that the /// `fn gen` and `fn kill` methods that set their state enforce this /// for you.) -#[derive(Debug)] -pub struct BlockSets<'a, E: Idx> { - /// Dataflow state immediately before control flow enters the given block. - pub(crate) on_entry: &'a mut BitSet, +#[derive(Debug, Clone, Copy)] +pub struct GenKill { + pub(crate) gen_set: T, + pub(crate) kill_set: T, +} - /// Bits that are set to 1 by the time we exit the given block. Hybrid - /// because it usually contains only 0 or 1 elements. - pub(crate) gen_set: &'a mut HybridBitSet, +type GenKillSet = GenKill>; - /// Bits that are set to 0 by the time we exit the given block. Hybrid - /// because it usually contains only 0 or 1 elements. - pub(crate) kill_set: &'a mut HybridBitSet, +impl GenKill { + /// Creates a new tuple where `gen_set == kill_set == elem`. 
+ pub(crate) fn from_elem(elem: T) -> Self + where T: Clone + { + GenKill { + gen_set: elem.clone(), + kill_set: elem, + } + } } -impl<'a, E:Idx> BlockSets<'a, E> { +impl GenKillSet { + pub(crate) fn clear(&mut self) { + self.gen_set.clear(); + self.kill_set.clear(); + } + fn gen(&mut self, e: E) { self.gen_set.insert(e); self.kill_set.remove(e); @@ -541,30 +501,53 @@ impl<'a, E:Idx> BlockSets<'a, E> { } } - fn apply_local_effect(&mut self) { - self.on_entry.union(self.gen_set); - self.on_entry.subtract(self.kill_set); + /// Computes `(set ∪ gen) - kill` and assigns the result to `set`. + pub(crate) fn apply(&self, set: &mut BitSet) { + set.union(&self.gen_set); + set.subtract(&self.kill_set); } } +#[derive(Debug)] +pub struct AllSets { + /// Analysis bitwidth for each block. + bits_per_block: usize, + + /// For each block, bits valid on entry to the block. + on_entry: Vec>, + + /// The transfer function of each block expressed as the set of bits + /// generated and killed by executing the statements + terminator in the + /// block -- with one caveat. In particular, for *call terminators*, the + /// effect of storing the destination is not included, since that only takes + /// effect on the **success** edge (and not the unwind edge). + trans: Vec>, +} + impl AllSets { pub fn bits_per_block(&self) -> usize { self.bits_per_block } - pub fn for_block(&mut self, block_idx: usize) -> BlockSets<'_, E> { - BlockSets { - on_entry: &mut self.on_entry_sets[block_idx], - gen_set: &mut self.gen_sets[block_idx], - kill_set: &mut self.kill_sets[block_idx], - } + + pub fn get_mut(&mut self, block_idx: usize) -> (&mut BitSet, &mut GenKillSet) { + (&mut self.on_entry[block_idx], &mut self.trans[block_idx]) } - pub fn on_entry_set_for(&self, block_idx: usize) -> &BitSet { - &self.on_entry_sets[block_idx] + pub fn trans_for(&self, block_idx: usize) -> &GenKillSet { + &self.trans[block_idx] + } + pub fn trans_mut_for(&mut self, block_idx: usize) -> &mut GenKillSet { + &mut self.trans[block_idx] + } + pub fn entry_set_for(&self, block_idx: usize) -> &BitSet { + &self.on_entry[block_idx] + } + pub fn entry_set_mut_for(&mut self, block_idx: usize) -> &mut BitSet { + &mut self.on_entry[block_idx] } pub fn gen_set_for(&self, block_idx: usize) -> &HybridBitSet { - &self.gen_sets[block_idx] + &self.trans_for(block_idx).gen_set } pub fn kill_set_for(&self, block_idx: usize) -> &HybridBitSet { - &self.kill_sets[block_idx] + &self.trans_for(block_idx).kill_set } } @@ -579,35 +562,18 @@ pub trait InitialFlow { fn bottom_value() -> bool; } -pub trait BitDenotation<'tcx>: BitSetOperator { +/// A specific flavor of dataflow analysis. +/// +/// To run a dataflow analysis, one sets up an initial state for the +/// `START_BLOCK` via `start_block_effect` and a transfer function (`trans`) +/// for each block individually. The entry set for all other basic blocks is +/// initialized to `InitialFlow::bottom_value`. The dataflow analysis then +/// iteratively modifies the various entry sets (but leaves the the transfer +/// function unchanged). +pub trait BitDenotation<'tcx>: BitSetOperator + InitialFlow { /// Specifies what index type is used to access the bitvector. type Idx: Idx; - /// Some analyses want to accumulate knowledge within a block when - /// analyzing its statements for building the gen/kill sets. Override - /// this method to return true in such cases. 
- /// - /// When this returns true, the statement-effect (re)construction - /// will clone the `on_entry` state and pass along a reference via - /// `sets.on_entry` to that local clone into `statement_effect` and - /// `terminator_effect`). - /// - /// When it's false, no local clone is constructed; instead a - /// reference directly into `on_entry` is passed along via - /// `sets.on_entry` instead, which represents the flow state at - /// the block's start, not necessarily the state immediately prior - /// to the statement/terminator under analysis. - /// - /// In either case, the passed reference is mutable, but this is a - /// wart from using the `BlockSets` type in the API; the intention - /// is that the `statement_effect` and `terminator_effect` methods - /// mutate only the gen/kill sets. - // - // FIXME: we should consider enforcing the intention described in - // the previous paragraph by passing the three sets in separate - // parameters to encode their distinct mutabilities. - fn accumulates_intrablock_state() -> bool { false } - /// A name describing the dataflow analysis that this /// `BitDenotation` is supporting. The name should be something /// suitable for plugging in as part of a filename (i.e., avoid @@ -640,7 +606,7 @@ pub trait BitDenotation<'tcx>: BitSetOperator { /// applied, in that order, before moving for the next /// statement. fn before_statement_effect(&self, - _sets: &mut BlockSets<'_, Self::Idx>, + _trans: &mut GenKillSet, _location: Location) {} /// Mutates the block-sets (the flow sets for the given @@ -654,7 +620,7 @@ pub trait BitDenotation<'tcx>: BitSetOperator { /// `bb_data` is the sequence of statements identified by `bb` in /// the MIR. fn statement_effect(&self, - sets: &mut BlockSets<'_, Self::Idx>, + trans: &mut GenKillSet, location: Location); /// Similar to `terminator_effect`, except it applies @@ -669,7 +635,7 @@ pub trait BitDenotation<'tcx>: BitSetOperator { /// applied, in that order, before moving for the next /// terminator. fn before_terminator_effect(&self, - _sets: &mut BlockSets<'_, Self::Idx>, + _trans: &mut GenKillSet, _location: Location) {} /// Mutates the block-sets (the flow sets for the given @@ -683,7 +649,7 @@ pub trait BitDenotation<'tcx>: BitSetOperator { /// The effects applied here cannot depend on which branch the /// terminator took. 
fn terminator_effect(&self, - sets: &mut BlockSets<'_, Self::Idx>, + trans: &mut GenKillSet, location: Location); /// Mutates the block-sets according to the (flow-dependent) @@ -718,17 +684,16 @@ impl<'a, 'tcx, D> DataflowAnalysis<'a, 'tcx, D> where D: BitDenotation<'tcx> { pub fn new(body: &'a Body<'tcx>, dead_unwinds: &'a BitSet, - denotation: D) -> Self where D: InitialFlow { + denotation: D) -> Self { let bits_per_block = denotation.bits_per_block(); let num_blocks = body.basic_blocks().len(); - let on_entry_sets = if D::bottom_value() { + let on_entry = if D::bottom_value() { vec![BitSet::new_filled(bits_per_block); num_blocks] } else { vec![BitSet::new_empty(bits_per_block); num_blocks] }; - let gen_sets = vec![HybridBitSet::new_empty(bits_per_block); num_blocks]; - let kill_sets = gen_sets.clone(); + let nop = GenKill::from_elem(HybridBitSet::new_empty(bits_per_block)); DataflowAnalysis { body, @@ -736,9 +701,8 @@ impl<'a, 'tcx, D> DataflowAnalysis<'a, 'tcx, D> where D: BitDenotation<'tcx> flow_state: DataflowState { sets: AllSets { bits_per_block, - on_entry_sets, - gen_sets, - kill_sets, + on_entry, + trans: vec![nop; num_blocks], }, operator: denotation, } @@ -836,7 +800,7 @@ where in_out: &BitSet, bb: mir::BasicBlock, dirty_queue: &mut WorkQueue) { - let entry_set = &mut self.flow_state.sets.for_block(bb.index()).on_entry; + let entry_set = self.flow_state.sets.entry_set_mut_for(bb.index()); let set_changed = self.flow_state.operator.join(entry_set, &in_out); if set_changed { dirty_queue.insert(bb); diff --git a/src/librustc_mir/transform/elaborate_drops.rs b/src/librustc_mir/transform/elaborate_drops.rs index d805c66e3c360..b2b489b2e3884 100644 --- a/src/librustc_mir/transform/elaborate_drops.rs +++ b/src/librustc_mir/transform/elaborate_drops.rs @@ -95,7 +95,7 @@ fn find_dead_unwinds<'tcx>( }; let mut init_data = InitializationData { - live: flow_inits.sets().on_entry_set_for(bb.index()).to_owned(), + live: flow_inits.sets().entry_set_for(bb.index()).to_owned(), dead: BitSet::new_empty(env.move_data.move_paths.len()), }; debug!("find_dead_unwinds @ {:?}: {:?}; init_data={:?}", @@ -304,9 +304,9 @@ impl<'b, 'tcx> ElaborateDropsCtxt<'b, 'tcx> { fn initialization_data_at(&self, loc: Location) -> InitializationData { let mut data = InitializationData { - live: self.flow_inits.sets().on_entry_set_for(loc.block.index()) + live: self.flow_inits.sets().entry_set_for(loc.block.index()) .to_owned(), - dead: self.flow_uninits.sets().on_entry_set_for(loc.block.index()) + dead: self.flow_uninits.sets().entry_set_for(loc.block.index()) .to_owned(), }; for stmt in 0..loc.statement_index { diff --git a/src/librustc_mir/transform/rustc_peek.rs b/src/librustc_mir/transform/rustc_peek.rs index c4601229653cf..f12309c1d0a13 100644 --- a/src/librustc_mir/transform/rustc_peek.rs +++ b/src/librustc_mir/transform/rustc_peek.rs @@ -18,7 +18,6 @@ use crate::dataflow::{ }; use crate::dataflow::move_paths::{MovePathIndex, LookupResult}; use crate::dataflow::move_paths::{HasMoveData, MoveData}; -use crate::dataflow; use crate::dataflow::has_rustc_mir_with; @@ -133,9 +132,8 @@ fn each_block<'tcx, O>( } }; - let mut on_entry = results.0.sets.on_entry_set_for(bb.index()).to_owned(); - let mut gen_set = results.0.sets.gen_set_for(bb.index()).clone(); - let mut kill_set = results.0.sets.kill_set_for(bb.index()).clone(); + let mut on_entry = results.0.sets.entry_set_for(bb.index()).to_owned(); + let mut trans = results.0.sets.trans_for(bb.index()).clone(); // Emulate effect of all statements in the block up to 
(but not // including) the borrow within `peek_arg_place`. Do *not* include @@ -143,10 +141,6 @@ fn each_block<'tcx, O>( // of the argument at time immediate preceding Call to // `rustc_peek`). - let mut sets = dataflow::BlockSets { on_entry: &mut on_entry, - gen_set: &mut gen_set, - kill_set: &mut kill_set }; - for (j, stmt) in statements.iter().enumerate() { debug!("rustc_peek: ({:?},{}) {:?}", bb, j, stmt); let (place, rvalue) = match stmt.kind { @@ -170,7 +164,7 @@ fn each_block<'tcx, O>( // Okay, our search is over. match move_data.rev_lookup.find(peeking_at_place) { LookupResult::Exact(peek_mpi) => { - let bit_state = sets.on_entry.contains(peek_mpi); + let bit_state = on_entry.contains(peek_mpi); debug!("rustc_peek({:?} = &{:?}) bit_state: {}", place, peeking_at_place, bit_state); if !bit_state { @@ -197,18 +191,18 @@ fn each_block<'tcx, O>( debug!("rustc_peek: computing effect on place: {:?} ({:?}) in stmt: {:?}", place, lhs_mpi, stmt); // reset GEN and KILL sets before emulating their effect. - sets.gen_set.clear(); - sets.kill_set.clear(); + trans.clear(); results.0.operator.before_statement_effect( - &mut sets, Location { block: bb, statement_index: j }); + &mut trans, + Location { block: bb, statement_index: j }); results.0.operator.statement_effect( - &mut sets, Location { block: bb, statement_index: j }); - sets.on_entry.union(sets.gen_set); - sets.on_entry.subtract(sets.kill_set); + &mut trans, + Location { block: bb, statement_index: j }); + trans.apply(&mut on_entry); } results.0.operator.before_terminator_effect( - &mut sets, + &mut trans, Location { block: bb, statement_index: statements.len() }); tcx.sess.span_err(span, &format!("rustc_peek: MIR did not match \ From c8cbd4fc784e5d432c02b0dc14a592f112dab59f Mon Sep 17 00:00:00 2001 From: Dylan MacKenzie Date: Fri, 21 Jun 2019 12:39:01 -0700 Subject: [PATCH 2/2] Merge `BitSetOperator` and `InitialFlow` into one trait. Since the value of `InitialFlow` defines the semantics of the `join` operation, there's no reason to have seperate traits for each. We can add a default impl of `join` which branches based on `BOTTOM_VALUE`. This should get optimized away. --- src/librustc_data_structures/bit_set.rs | 5 -- .../dataflow/impls/borrowed_locals.rs | 15 +--- src/librustc_mir/dataflow/impls/borrows.rs | 19 ++--- src/librustc_mir/dataflow/impls/mod.rs | 74 ++++--------------- .../dataflow/impls/storage_liveness.rs | 15 +--- src/librustc_mir/dataflow/mod.rs | 38 +++++++--- 6 files changed, 51 insertions(+), 115 deletions(-) diff --git a/src/librustc_data_structures/bit_set.rs b/src/librustc_data_structures/bit_set.rs index 7a11ca070071b..f4a60b4d2c192 100644 --- a/src/librustc_data_structures/bit_set.rs +++ b/src/librustc_data_structures/bit_set.rs @@ -273,11 +273,6 @@ impl<'a, T: Idx> Iterator for BitIter<'a, T> { } } -pub trait BitSetOperator { - /// Combine one bitset into another. 
- fn join(&self, inout_set: &mut BitSet, in_set: &BitSet) -> bool; -} - #[inline] fn bitwise(out_vec: &mut [Word], in_vec: &[Word], op: Op) -> bool where Op: Fn(Word, Word) -> Word diff --git a/src/librustc_mir/dataflow/impls/borrowed_locals.rs b/src/librustc_mir/dataflow/impls/borrowed_locals.rs index 87ee254fb74bc..0f7f37f2db8b4 100644 --- a/src/librustc_mir/dataflow/impls/borrowed_locals.rs +++ b/src/librustc_mir/dataflow/impls/borrowed_locals.rs @@ -83,18 +83,9 @@ impl<'a, 'tcx> BitDenotation<'tcx> for HaveBeenBorrowedLocals<'a, 'tcx> { } } -impl<'a, 'tcx> BitSetOperator for HaveBeenBorrowedLocals<'a, 'tcx> { - #[inline] - fn join(&self, inout_set: &mut BitSet, in_set: &BitSet) -> bool { - inout_set.union(in_set) // "maybe" means we union effects of both preds - } -} - -impl<'a, 'tcx> InitialFlow for HaveBeenBorrowedLocals<'a, 'tcx> { - #[inline] - fn bottom_value() -> bool { - false // bottom = unborrowed - } +impl<'a, 'tcx> BottomValue for HaveBeenBorrowedLocals<'a, 'tcx> { + // bottom = unborrowed + const BOTTOM_VALUE: bool = false; } struct BorrowedLocalsVisitor<'gk> { diff --git a/src/librustc_mir/dataflow/impls/borrows.rs b/src/librustc_mir/dataflow/impls/borrows.rs index b29a4f351b58b..53d00d44e3f55 100644 --- a/src/librustc_mir/dataflow/impls/borrows.rs +++ b/src/librustc_mir/dataflow/impls/borrows.rs @@ -5,11 +5,11 @@ use rustc::mir::{self, Location, Place, PlaceBase, Body}; use rustc::ty::TyCtxt; use rustc::ty::RegionVid; -use rustc_data_structures::bit_set::{BitSet, BitSetOperator}; +use rustc_data_structures::bit_set::BitSet; use rustc_data_structures::fx::FxHashMap; use rustc_data_structures::indexed_vec::{Idx, IndexVec}; -use crate::dataflow::{BitDenotation, InitialFlow, GenKillSet}; +use crate::dataflow::{BitDenotation, BottomValue, GenKillSet}; use crate::borrow_check::nll::region_infer::RegionInferenceContext; use crate::borrow_check::nll::ToRegionVid; use crate::borrow_check::places_conflict; @@ -331,16 +331,7 @@ impl<'a, 'tcx> BitDenotation<'tcx> for Borrows<'a, 'tcx> { } } -impl<'a, 'tcx> BitSetOperator for Borrows<'a, 'tcx> { - #[inline] - fn join(&self, inout_set: &mut BitSet, in_set: &BitSet) -> bool { - inout_set.union(in_set) // "maybe" means we union effects of both preds - } -} - -impl<'a, 'tcx> InitialFlow for Borrows<'a, 'tcx> { - #[inline] - fn bottom_value() -> bool { - false // bottom = nothing is reserved or activated yet - } +impl<'a, 'tcx> BottomValue for Borrows<'a, 'tcx> { + /// bottom = nothing is reserved or activated yet; + const BOTTOM_VALUE: bool = false; } diff --git a/src/librustc_mir/dataflow/impls/mod.rs b/src/librustc_mir/dataflow/impls/mod.rs index ad92c8783d36c..065cfe8a4e823 100644 --- a/src/librustc_mir/dataflow/impls/mod.rs +++ b/src/librustc_mir/dataflow/impls/mod.rs @@ -4,7 +4,7 @@ use rustc::ty::TyCtxt; use rustc::mir::{self, Body, Location}; -use rustc_data_structures::bit_set::{BitSet, BitSetOperator}; +use rustc_data_structures::bit_set::BitSet; use rustc_data_structures::indexed_vec::Idx; use super::MoveDataParamEnv; @@ -12,7 +12,7 @@ use super::MoveDataParamEnv; use crate::util::elaborate_drops::DropFlagState; use super::move_paths::{HasMoveData, MoveData, MovePathIndex, InitIndex, InitKind}; -use super::{BitDenotation, InitialFlow, GenKillSet}; +use super::{BitDenotation, BottomValue, GenKillSet}; use super::drop_flag_effects_for_function_entry; use super::drop_flag_effects_for_location; @@ -505,68 +505,22 @@ impl<'a, 'tcx> BitDenotation<'tcx> for EverInitializedPlaces<'a, 'tcx> { } } -impl<'a, 'tcx> BitSetOperator for 
MaybeInitializedPlaces<'a, 'tcx> { - #[inline] - fn join(&self, inout_set: &mut BitSet, in_set: &BitSet) -> bool { - inout_set.union(in_set) // "maybe" means we union effects of both preds - } -} - -impl<'a, 'tcx> BitSetOperator for MaybeUninitializedPlaces<'a, 'tcx> { - #[inline] - fn join(&self, inout_set: &mut BitSet, in_set: &BitSet) -> bool { - inout_set.union(in_set) // "maybe" means we union effects of both preds - } -} - -impl<'a, 'tcx> BitSetOperator for DefinitelyInitializedPlaces<'a, 'tcx> { - #[inline] - fn join(&self, inout_set: &mut BitSet, in_set: &BitSet) -> bool { - inout_set.intersect(in_set) // "definitely" means we intersect effects of both preds - } -} - -impl<'a, 'tcx> BitSetOperator for EverInitializedPlaces<'a, 'tcx> { - #[inline] - fn join(&self, inout_set: &mut BitSet, in_set: &BitSet) -> bool { - inout_set.union(in_set) // inits from both preds are in scope - } -} - -// The way that dataflow fixed point iteration works, you want to -// start at bottom and work your way to a fixed point. Control-flow -// merges will apply the `join` operator to each block entry's current -// state (which starts at that bottom value). -// -// This means, for propagation across the graph, that you either want -// to start at all-zeroes and then use Union as your merge when -// propagating, or you start at all-ones and then use Intersect as -// your merge when propagating. - -impl<'a, 'tcx> InitialFlow for MaybeInitializedPlaces<'a, 'tcx> { - #[inline] - fn bottom_value() -> bool { - false // bottom = uninitialized - } +impl<'a, 'tcx> BottomValue for MaybeInitializedPlaces<'a, 'tcx> { + /// bottom = uninitialized + const BOTTOM_VALUE: bool = false; } -impl<'a, 'tcx> InitialFlow for MaybeUninitializedPlaces<'a, 'tcx> { - #[inline] - fn bottom_value() -> bool { - false // bottom = initialized (start_block_effect counters this at outset) - } +impl<'a, 'tcx> BottomValue for MaybeUninitializedPlaces<'a, 'tcx> { + /// bottom = initialized (start_block_effect counters this at outset) + const BOTTOM_VALUE: bool = false; } -impl<'a, 'tcx> InitialFlow for DefinitelyInitializedPlaces<'a, 'tcx> { - #[inline] - fn bottom_value() -> bool { - true // bottom = initialized (start_block_effect counters this at outset) - } +impl<'a, 'tcx> BottomValue for DefinitelyInitializedPlaces<'a, 'tcx> { + /// bottom = initialized (start_block_effect counters this at outset) + const BOTTOM_VALUE: bool = true; } -impl<'a, 'tcx> InitialFlow for EverInitializedPlaces<'a, 'tcx> { - #[inline] - fn bottom_value() -> bool { - false // bottom = no initialized variables by default - } +impl<'a, 'tcx> BottomValue for EverInitializedPlaces<'a, 'tcx> { + /// bottom = no initialized variables by default + const BOTTOM_VALUE: bool = false; } diff --git a/src/librustc_mir/dataflow/impls/storage_liveness.rs b/src/librustc_mir/dataflow/impls/storage_liveness.rs index a717a3947e5cf..d2003993d4506 100644 --- a/src/librustc_mir/dataflow/impls/storage_liveness.rs +++ b/src/librustc_mir/dataflow/impls/storage_liveness.rs @@ -59,16 +59,7 @@ impl<'a, 'tcx> BitDenotation<'tcx> for MaybeStorageLive<'a, 'tcx> { } } -impl<'a, 'tcx> BitSetOperator for MaybeStorageLive<'a, 'tcx> { - #[inline] - fn join(&self, inout_set: &mut BitSet, in_set: &BitSet) -> bool { - inout_set.union(in_set) // "maybe" means we union effects of both preds - } -} - -impl<'a, 'tcx> InitialFlow for MaybeStorageLive<'a, 'tcx> { - #[inline] - fn bottom_value() -> bool { - false // bottom = dead - } +impl<'a, 'tcx> BottomValue for MaybeStorageLive<'a, 'tcx> { + /// bottom 
= dead + const BOTTOM_VALUE: bool = false; } diff --git a/src/librustc_mir/dataflow/mod.rs b/src/librustc_mir/dataflow/mod.rs index d4a0ec78c5b80..444f73d73565e 100644 --- a/src/librustc_mir/dataflow/mod.rs +++ b/src/librustc_mir/dataflow/mod.rs @@ -1,7 +1,7 @@ use syntax::ast::{self, MetaItem}; use syntax::symbol::{Symbol, sym}; -use rustc_data_structures::bit_set::{BitSet, BitSetOperator, HybridBitSet}; +use rustc_data_structures::bit_set::{BitSet, HybridBitSet}; use rustc_data_structures::indexed_vec::Idx; use rustc_data_structures::work_queue::WorkQueue; @@ -552,14 +552,28 @@ impl AllSets { } /// Parameterization for the precise form of data flow that is used. -/// `InitialFlow` handles initializing the bitvectors before any -/// code is inspected by the analysis. Analyses that need more nuanced -/// initialization (e.g., they need to consult the results of some other -/// dataflow analysis to set up the initial bitvectors) should not -/// implement this. -pub trait InitialFlow { - /// Specifies the initial value for each bit in the `on_entry` set - fn bottom_value() -> bool; +/// +/// `BottomValue` determines whether the initial entry set for each basic block is empty or full. +/// This also determines the semantics of the lattice `join` operator used to merge dataflow +/// results, since dataflow works by starting at the bottom and moving monotonically to a fixed +/// point. +/// +/// This means, for propagation across the graph, that you either want to start at all-zeroes and +/// then use Union as your merge when propagating, or you start at all-ones and then use Intersect +/// as your merge when propagating. +pub trait BottomValue { + /// Specifies the initial value for each bit in the entry set for each basic block. + const BOTTOM_VALUE: bool; + + /// Merges `in_set` into `inout_set`, returning `true` if `inout_set` changed. + #[inline] + fn join(&self, inout_set: &mut BitSet, in_set: &BitSet) -> bool { + if Self::BOTTOM_VALUE == false { + inout_set.union(in_set) + } else { + inout_set.intersect(in_set) + } + } } /// A specific flavor of dataflow analysis. @@ -567,10 +581,10 @@ pub trait InitialFlow { /// To run a dataflow analysis, one sets up an initial state for the /// `START_BLOCK` via `start_block_effect` and a transfer function (`trans`) /// for each block individually. The entry set for all other basic blocks is -/// initialized to `InitialFlow::bottom_value`. The dataflow analysis then +/// initialized to `Self::BOTTOM_VALUE`. The dataflow analysis then /// iteratively modifies the various entry sets (but leaves the the transfer /// function unchanged). -pub trait BitDenotation<'tcx>: BitSetOperator + InitialFlow { +pub trait BitDenotation<'tcx>: BottomValue { /// Specifies what index type is used to access the bitvector. type Idx: Idx; @@ -688,7 +702,7 @@ impl<'a, 'tcx, D> DataflowAnalysis<'a, 'tcx, D> where D: BitDenotation<'tcx> let bits_per_block = denotation.bits_per_block(); let num_blocks = body.basic_blocks().len(); - let on_entry = if D::bottom_value() { + let on_entry = if D::BOTTOM_VALUE == true { vec![BitSet::new_filled(bits_per_block); num_blocks] } else { vec![BitSet::new_empty(bits_per_block); num_blocks]
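
A rough, self-contained sketch of the two ideas described in the commit messages above -- the `GenKill` transfer function from the first commit and the `BottomValue` join from the second. This is not the compiler's code: `Bits` is a toy stand-in for `rustc_data_structures::bit_set::BitSet`, `MaybeBorrowed` is a made-up example analysis, and the method names merely mirror the patch.

// `Bits` models only the set operations used below.
#[derive(Clone, Debug)]
struct Bits(Vec<bool>);

impl Bits {
    fn new_empty(n: usize) -> Self { Bits(vec![false; n]) }
    fn insert(&mut self, i: usize) { self.0[i] = true; }
    fn remove(&mut self, i: usize) { self.0[i] = false; }

    /// Union in place; returns true if `self` changed.
    fn union(&mut self, other: &Bits) -> bool {
        let mut changed = false;
        for (a, b) in self.0.iter_mut().zip(&other.0) {
            if *b && !*a { *a = true; changed = true; }
        }
        changed
    }

    /// Intersection in place; returns true if `self` changed.
    fn intersect(&mut self, other: &Bits) -> bool {
        let mut changed = false;
        for (a, b) in self.0.iter_mut().zip(&other.0) {
            if *a && !*b { *a = false; changed = true; }
        }
        changed
    }

    /// Clears every bit that is set in `other`.
    fn subtract(&mut self, other: &Bits) {
        for (a, b) in self.0.iter_mut().zip(&other.0) {
            if *b { *a = false; }
        }
    }
}

/// A block's transfer function as disjoint gen/kill sets (the patch's `GenKill`).
#[derive(Clone, Debug)]
struct GenKill {
    gen_set: Bits,
    kill_set: Bits,
}

impl GenKill {
    fn new(n: usize) -> Self {
        GenKill { gen_set: Bits::new_empty(n), kill_set: Bits::new_empty(n) }
    }
    /// Marks bit `i` as generated, keeping gen/kill disjoint like the real setters.
    fn gen(&mut self, i: usize) { self.gen_set.insert(i); self.kill_set.remove(i); }
    /// Marks bit `i` as killed.
    fn kill(&mut self, i: usize) { self.kill_set.insert(i); self.gen_set.remove(i); }
    /// Computes `(state ∪ gen) - kill` in place.
    fn apply(&self, state: &mut Bits) {
        state.union(&self.gen_set);
        state.subtract(&self.kill_set);
    }
}

/// The second commit's idea: one constant fixes both the initial entry sets
/// and the lattice join used at control-flow merges.
trait BottomValue {
    const BOTTOM_VALUE: bool;

    /// Union when bottom is all-zeroes, intersection when bottom is all-ones.
    /// Returns true if `inout_set` changed.
    fn join(&self, inout_set: &mut Bits, in_set: &Bits) -> bool {
        if !Self::BOTTOM_VALUE { inout_set.union(in_set) } else { inout_set.intersect(in_set) }
    }
}

/// A "maybe"-style analysis: bottom = unborrowed, merged by union.
struct MaybeBorrowed;
impl BottomValue for MaybeBorrowed {
    const BOTTOM_VALUE: bool = false;
}

fn main() {
    // Entry state of a block plus the block's transfer function.
    let mut state = Bits::new_empty(4);
    let mut trans = GenKill::new(4);
    trans.gen(1);  // e.g. local 1 is borrowed by some statement
    trans.kill(3); // e.g. a later StorageDead(3) invalidates borrows of local 3
    trans.apply(&mut state);
    assert!(state.0[1] && !state.0[3]);

    // Merge the block's exit state into a successor's entry set.
    let mut succ_entry = Bits::new_empty(4);
    let changed = MaybeBorrowed.join(&mut succ_entry, &state);
    assert!(changed && succ_entry.0[1]);
}

In this simplified form the dataflow loop reduces to: seed every entry set with `BOTTOM_VALUE`, apply `start_block_effect` to the start block, then repeatedly `apply` each block's `GenKill` to its entry set and `join` the result into its successors until nothing changes -- which is the shape of `build_sets` and `propagate` after this patch.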