diff --git a/src/librustc_mir/borrow_check/nll/region_infer/dfs.rs b/src/librustc_mir/borrow_check/nll/region_infer/dfs.rs
index d55b601823245..4fcd3118f9108 100644
--- a/src/librustc_mir/borrow_check/nll/region_infer/dfs.rs
+++ b/src/librustc_mir/borrow_check/nll/region_infer/dfs.rs
@@ -18,9 +18,26 @@ use borrow_check::nll::region_infer::values::{RegionElementIndex, RegionValueEle
 use syntax::codemap::Span;
 use rustc::mir::{Location, Mir};
 use rustc::ty::RegionVid;
-use rustc_data_structures::fx::FxHashSet;
+use rustc_data_structures::bitvec::BitVector;
+use rustc_data_structures::indexed_vec::Idx;
+
+pub(super) struct DfsStorage {
+    stack: Vec<Location>,
+    visited: BitVector,
+}
 
 impl<'tcx> RegionInferenceContext<'tcx> {
+    /// Creates dfs storage for use by dfs; this should be shared
+    /// across as many calls to dfs as possible to amortize allocation
+    /// costs.
+    pub(super) fn new_dfs_storage(&self) -> DfsStorage {
+        let num_elements = self.elements.num_elements();
+        DfsStorage {
+            stack: vec![],
+            visited: BitVector::new(num_elements),
+        }
+    }
+
     /// Function used to satisfy or test a `R1: R2 @ P`
     /// constraint. The core idea is that it performs a DFS starting
     /// from `P`. The precise actions *during* that DFS depend on the
@@ -34,17 +51,21 @@ impl<'tcx> RegionInferenceContext<'tcx> {
     /// - `Ok(false)` if the walk was completed with no changes;
     /// - `Err(early)` if the walk was exited early by `op`; `early` is the
     ///   value that `op` returned.
-    pub(super) fn dfs<C>(&self, mir: &Mir<'tcx>, mut op: C) -> Result<bool, C::Early>
+    #[inline(never)] // ensure dfs is identifiable in profiles
+    pub(super) fn dfs<C>(
+        &self,
+        mir: &Mir<'tcx>,
+        dfs: &mut DfsStorage,
+        mut op: C,
+    ) -> Result<bool, C::Early>
     where
         C: DfsOp,
     {
         let mut changed = false;
 
-        let mut stack = vec![];
-        let mut visited = FxHashSet();
-
-        stack.push(op.start_point());
-        while let Some(p) = stack.pop() {
+        dfs.visited.clear();
+        dfs.stack.push(op.start_point());
+        while let Some(p) = dfs.stack.pop() {
             let point_index = self.elements.index(p);
 
             if !op.source_region_contains(point_index) {
@@ -52,7 +73,7 @@ impl<'tcx> RegionInferenceContext<'tcx> {
                 continue;
             }
 
-            if !visited.insert(p) {
+            if !dfs.visited.insert(point_index.index()) {
                 debug!(" already visited");
                 continue;
             }
@@ -62,25 +83,27 @@ impl<'tcx> RegionInferenceContext<'tcx> {
 
             let block_data = &mir[p.block];
 
-            let start_stack_len = stack.len();
+            let start_stack_len = dfs.stack.len();
 
             if p.statement_index < block_data.statements.len() {
-                stack.push(Location {
+                dfs.stack.push(Location {
                     statement_index: p.statement_index + 1,
                     ..p
                 });
             } else {
-                stack.extend(block_data.terminator().successors().iter().map(
-                    |&basic_block| {
-                        Location {
+                dfs.stack.extend(
+                    block_data
+                        .terminator()
+                        .successors()
+                        .iter()
+                        .map(|&basic_block| Location {
                             statement_index: 0,
                             block: basic_block,
-                        }
-                    },
-                ));
+                        }),
+                );
             }
 
-            if stack.len() == start_stack_len {
+            if dfs.stack.len() == start_stack_len {
                 // If we reach the END point in the graph, then copy
                 // over any skolemized end points in the `from_region`
                 // and make sure they are included in the `to_region`.
@@ -229,9 +252,8 @@ impl<'v, 'tcx> DfsOp for TestTargetOutlivesSource<'v, 'tcx> {
 
                 // `X: ur_in_source`, OK.
                 if self.inferred_values
                     .universal_regions_outlived_by(self.target_region)
-                    .any(|ur_in_target| {
-                        self.universal_regions.outlives(ur_in_target, ur_in_source)
-                    }) {
+                    .any(|ur_in_target| self.universal_regions.outlives(ur_in_target, ur_in_source))
+                {
                     continue;
                 }
diff --git a/src/librustc_mir/borrow_check/nll/region_infer/dump_mir.rs b/src/librustc_mir/borrow_check/nll/region_infer/dump_mir.rs
index 631b1d0f8941d..b0346abee5a5f 100644
--- a/src/librustc_mir/borrow_check/nll/region_infer/dump_mir.rs
+++ b/src/librustc_mir/borrow_check/nll/region_infer/dump_mir.rs
@@ -84,6 +84,7 @@ impl<'tcx> RegionInferenceContext<'tcx> {
             sub,
             point,
             span,
+            next: _,
         } = constraint;
         with_msg(&format!(
             "{:?}: {:?} @ {:?} due to {:?}",
diff --git a/src/librustc_mir/borrow_check/nll/region_infer/graphviz.rs b/src/librustc_mir/borrow_check/nll/region_infer/graphviz.rs
index db773240809c5..6c4c02a36a0d7 100644
--- a/src/librustc_mir/borrow_check/nll/region_infer/graphviz.rs
+++ b/src/librustc_mir/borrow_check/nll/region_infer/graphviz.rs
@@ -55,7 +55,7 @@ impl<'this, 'tcx> dot::GraphWalk<'this> for RegionInferenceContext<'tcx> {
         vids.into_cow()
     }
     fn edges(&'this self) -> dot::Edges<'this, Constraint> {
-        (&self.constraints[..]).into_cow()
+        (&self.constraints.raw[..]).into_cow()
     }
 
     // Render `a: b` as `a <- b`, indicating the flow
diff --git a/src/librustc_mir/borrow_check/nll/region_infer/mod.rs b/src/librustc_mir/borrow_check/nll/region_infer/mod.rs
index 66776a94ff01f..08391401cc696 100644
--- a/src/librustc_mir/borrow_check/nll/region_infer/mod.rs
+++ b/src/librustc_mir/borrow_check/nll/region_infer/mod.rs
@@ -8,8 +8,6 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-use std::collections::HashMap;
-
 use super::universal_regions::UniversalRegions;
 use rustc::hir::def_id::DefId;
 use rustc::infer::InferCtxt;
@@ -23,9 +21,9 @@ use rustc::mir::{ClosureOutlivesRequirement, ClosureOutlivesSubject, ClosureRegi
                  Local, Location, Mir};
 use rustc::traits::ObligationCause;
 use rustc::ty::{self, RegionVid, Ty, TypeFoldable};
-use rustc::util::common::ErrorReported;
+use rustc::util::common::{self, ErrorReported};
 use rustc_data_structures::bitvec::BitVector;
-use rustc_data_structures::indexed_vec::IndexVec;
+use rustc_data_structures::indexed_vec::{Idx, IndexVec};
 use std::fmt;
 use std::rc::Rc;
 use syntax::ast;
@@ -61,8 +59,15 @@ pub struct RegionInferenceContext<'tcx> {
     /// until `solve` is invoked.
     inferred_values: Option<RegionValues>,
 
+    /// For each variable, stores the index of the first constraint
+    /// where that variable appears on the RHS. This is the start of a
+    /// 'linked list' threaded by the `next` field in `Constraint`.
+    ///
+    /// This map is built when values are inferred.
+    dependency_map: Option<IndexVec<RegionVid, Option<ConstraintIndex>>>,
+
     /// The constraints we have accumulated and used during solving.
-    constraints: Vec<Constraint>,
+    constraints: IndexVec<ConstraintIndex, Constraint>,
 
     type_tests: Vec<TypeTest<'tcx>>,
@@ -143,10 +148,22 @@ pub struct Constraint {
     /// At this location.
     point: Location,
 
+    /// Later on, we thread the constraints onto a linked list
+    /// grouped by their `sub` field. So if you had:
+    ///
+    /// Index | Constraint | Next Field
+    /// ----- | ---------- | ----------
+    /// 0     | `'a: 'b`   | Some(2)
+    /// 1     | `'b: 'c`   | None
+    /// 2     | `'c: 'b`   | None
+    next: Option<ConstraintIndex>,
+
     /// Where did this constraint arise?
     span: Span,
 }
 
+newtype_index!(ConstraintIndex { DEBUG_FORMAT = "ConstraintIndex({})" });
+
 /// A "type test" corresponds to an outlives constraint between a type
 /// and a lifetime, like `T: 'x` or `::Bar: 'x`. They are
 /// translated from the `Verify` region constraints in the ordinary
@@ -259,7 +276,8 @@ impl<'tcx> RegionInferenceContext<'tcx> {
             elements: elements.clone(),
             liveness_constraints: RegionValues::new(elements, num_region_variables),
             inferred_values: None,
-            constraints: Vec::new(),
+            dependency_map: None,
+            constraints: IndexVec::new(),
             type_tests: Vec::new(),
             universal_regions,
         };
@@ -387,6 +405,7 @@ impl<'tcx> RegionInferenceContext<'tcx> {
             sup,
             sub,
             point,
+            next: None,
         });
     }
 
@@ -403,10 +422,25 @@ impl<'tcx> RegionInferenceContext<'tcx> {
         infcx: &InferCtxt<'_, 'gcx, 'tcx>,
         mir: &Mir<'tcx>,
         mir_def_id: DefId,
+    ) -> Option<ClosureRegionRequirements<'gcx>> {
+        common::time(
+            infcx.tcx.sess,
+            &format!("solve_nll_region_constraints({:?})", mir_def_id),
+            || self.solve_inner(infcx, mir, mir_def_id),
+        )
+    }
+
+    fn solve_inner<'gcx>(
+        &mut self,
+        infcx: &InferCtxt<'_, 'gcx, 'tcx>,
+        mir: &Mir<'tcx>,
+        mir_def_id: DefId,
     ) -> Option<ClosureRegionRequirements<'gcx>> {
         assert!(self.inferred_values.is_none(), "values already inferred");
 
-        self.propagate_constraints(mir);
+        let dfs_storage = &mut self.new_dfs_storage();
+
+        self.propagate_constraints(mir, dfs_storage);
 
         // If this is a closure, we can propagate unsatisfied
         // `outlives_requirements` to our creator, so create a vector
@@ -419,7 +453,13 @@ impl<'tcx> RegionInferenceContext<'tcx> {
             None
         };
 
-        self.check_type_tests(infcx, mir, mir_def_id, outlives_requirements.as_mut());
+        self.check_type_tests(
+            infcx,
+            mir,
+            dfs_storage,
+            mir_def_id,
+            outlives_requirements.as_mut(),
+        );
 
         self.check_universal_regions(infcx, mir_def_id, outlives_requirements.as_mut());
 
@@ -439,7 +479,8 @@ impl<'tcx> RegionInferenceContext<'tcx> {
     /// Re-execute the region inference, this time tracking causal information.
     /// This is significantly slower, so it is done only when an error is being reported.
     pub(super) fn compute_causal_info(&self, mir: &Mir<'tcx>) -> RegionCausalInfo {
-        let inferred_values = self.compute_region_values(mir, TrackCauses(true));
+        let dfs_storage = &mut self.new_dfs_storage();
+        let inferred_values = self.compute_region_values(mir, dfs_storage, TrackCauses(true));
         RegionCausalInfo { inferred_values }
     }
 
@@ -447,12 +488,19 @@ impl<'tcx> RegionInferenceContext<'tcx> {
     /// for each region variable until all the constraints are
     /// satisfied. Note that some values may grow **too** large to be
     /// feasible, but we check this later.
-    fn propagate_constraints(&mut self, mir: &Mir<'tcx>) {
-        let inferred_values = self.compute_region_values(mir, TrackCauses(false));
+    fn propagate_constraints(&mut self, mir: &Mir<'tcx>, dfs_storage: &mut dfs::DfsStorage) {
+        self.dependency_map = Some(self.build_dependency_map());
+        let inferred_values = self.compute_region_values(mir, dfs_storage, TrackCauses(false));
         self.inferred_values = Some(inferred_values);
     }
 
-    fn compute_region_values(&self, mir: &Mir<'tcx>, track_causes: TrackCauses) -> RegionValues {
+    #[inline(never)] // ensure dfs is identifiable in profiles
+    fn compute_region_values(
+        &self,
+        mir: &Mir<'tcx>,
+        dfs_storage: &mut dfs::DfsStorage,
+        track_causes: TrackCauses,
+    ) -> RegionValues {
         debug!("compute_region_values()");
         debug!("compute_region_values: constraints={:#?}", {
             let mut constraints: Vec<_> = self.constraints.iter().collect();
@@ -464,17 +512,17 @@ impl<'tcx> RegionInferenceContext<'tcx> {
         // constraints we have accumulated.
         let mut inferred_values = self.liveness_constraints.duplicate(track_causes);
 
-        let dependency_map = self.build_dependency_map();
+        let dependency_map = self.dependency_map.as_ref().unwrap();
 
         // Constraints that may need to be repropagated (initially all):
-        let mut dirty_list: Vec<_> = (0..self.constraints.len()).collect();
+        let mut dirty_list: Vec<_> = self.constraints.indices().collect();
 
         // Set to 0 for each constraint that is on the dirty list:
         let mut clean_bit_vec = BitVector::new(dirty_list.len());
 
         debug!("propagate_constraints: --------------------");
         while let Some(constraint_idx) = dirty_list.pop() {
-            clean_bit_vec.insert(constraint_idx);
+            clean_bit_vec.insert(constraint_idx.index());
 
             let constraint = &self.constraints[constraint_idx];
             debug!("propagate_constraints: constraint={:?}", constraint);
@@ -483,6 +531,7 @@ impl<'tcx> RegionInferenceContext<'tcx> {
             // outlives constraint.
             let Ok(made_changes) = self.dfs(
                 mir,
+                dfs_storage,
                 CopyFromSourceToTarget {
                     source_region: constraint.sub,
                     target_region: constraint.sup,
@@ -496,10 +545,12 @@ impl<'tcx> RegionInferenceContext<'tcx> {
                 debug!("propagate_constraints: sub={:?}", constraint.sub);
                 debug!("propagate_constraints: sup={:?}", constraint.sup);
 
-                for &dep_idx in dependency_map.get(&constraint.sup).unwrap_or(&vec![]) {
-                    if clean_bit_vec.remove(dep_idx) {
+                let mut opt_dep_idx = dependency_map[constraint.sup];
+                while let Some(dep_idx) = opt_dep_idx {
+                    if clean_bit_vec.remove(dep_idx.index()) {
                         dirty_list.push(dep_idx);
                     }
+                    opt_dep_idx = self.constraints[dep_idx].next;
                 }
             }
 
@@ -513,11 +564,15 @@ impl<'tcx> RegionInferenceContext<'tcx> {
     /// indices of constraints that need to be re-evaluated when X changes.
     /// These are constraints like Y: X @ P -- so if X changed, we may
    /// need to grow Y.
-    fn build_dependency_map(&self) -> HashMap<RegionVid, Vec<usize>> {
-        let mut map = HashMap::new();
-
-        for (idx, constraint) in self.constraints.iter().enumerate() {
-            map.entry(constraint.sub).or_insert(Vec::new()).push(idx);
+    #[inline(never)] // ensure dfs is identifiable in profiles
+    fn build_dependency_map(&mut self) -> IndexVec<RegionVid, Option<ConstraintIndex>> {
+        let mut map = IndexVec::from_elem(None, &self.definitions);
+
+        for (idx, constraint) in self.constraints.iter_enumerated_mut().rev() {
+            let mut head = &mut map[constraint.sub];
+            debug_assert!(constraint.next.is_none());
+            constraint.next = *head;
+            *head = Some(idx);
         }
         map
     }
@@ -531,6 +586,7 @@ impl<'tcx> RegionInferenceContext<'tcx> {
         &self,
         infcx: &InferCtxt<'_, 'gcx, 'tcx>,
         mir: &Mir<'tcx>,
+        dfs_storage: &mut dfs::DfsStorage,
         mir_def_id: DefId,
         mut propagated_outlives_requirements: Option<&mut Vec<ClosureOutlivesRequirement<'gcx>>>,
     ) {
@@ -539,7 +595,13 @@ impl<'tcx> RegionInferenceContext<'tcx> {
         for type_test in &self.type_tests {
             debug!("check_type_test: {:?}", type_test);
 
-            if self.eval_region_test(mir, type_test.point, type_test.lower_bound, &type_test.test) {
+            if self.eval_region_test(
+                mir,
+                dfs_storage,
+                type_test.point,
+                type_test.lower_bound,
+                &type_test.test,
+            ) {
                 continue;
             }
 
@@ -796,6 +858,7 @@ impl<'tcx> RegionInferenceContext<'tcx> {
     fn eval_region_test(
         &self,
         mir: &Mir<'tcx>,
+        dfs_storage: &mut dfs::DfsStorage,
         point: Location,
         lower_bound: RegionVid,
         test: &RegionTest,
@@ -808,19 +871,19 @@ impl<'tcx> RegionInferenceContext<'tcx> {
         match test {
             RegionTest::IsOutlivedByAllRegionsIn(regions) => regions
                 .iter()
-                .all(|&r| self.eval_outlives(mir, r, lower_bound, point)),
+                .all(|&r| self.eval_outlives(mir, dfs_storage, r, lower_bound, point)),
 
             RegionTest::IsOutlivedByAnyRegionIn(regions) => regions
                 .iter()
-                .any(|&r| self.eval_outlives(mir, r, lower_bound, point)),
+                .any(|&r| self.eval_outlives(mir, dfs_storage, r, lower_bound, point)),
 
             RegionTest::Any(tests) => tests
                 .iter()
-                .any(|test| self.eval_region_test(mir, point, lower_bound, test)),
+                .any(|test| self.eval_region_test(mir, dfs_storage, point, lower_bound, test)),
 
             RegionTest::All(tests) => tests
                 .iter()
-                .all(|test| self.eval_region_test(mir, point, lower_bound, test)),
+                .all(|test| self.eval_region_test(mir, dfs_storage, point, lower_bound, test)),
         }
     }
 
@@ -828,6 +891,7 @@ impl<'tcx> RegionInferenceContext<'tcx> {
     fn eval_outlives(
         &self,
         mir: &Mir<'tcx>,
+        dfs_storage: &mut dfs::DfsStorage,
        sup_region: RegionVid,
         sub_region: RegionVid,
         point: Location,
@@ -843,6 +907,7 @@ impl<'tcx> RegionInferenceContext<'tcx> {
         // yield an `Err` result.
         match self.dfs(
             mir,
+            dfs_storage,
             TestTargetOutlivesSource {
                 source_region: sub_region,
                 target_region: sup_region,
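
Aside, not part of the patch above: a minimal, standalone sketch of the intrusive linked-list "dependency map" that this diff introduces (constraints threaded through `Constraint::next` instead of the old `HashMap<RegionVid, Vec<usize>>`). It uses plain `usize` indices in place of `RegionVid`/`ConstraintIndex`, and the helper names (`build_dependency_map`, `for_each_dependent`) are illustrative only, not rustc APIs.

// Sketch only: plain-usize stand-ins for rustc's RegionVid / ConstraintIndex.
struct Constraint {
    #[allow(dead_code)]
    sup: usize,          // "Y" in `Y: X @ P`: the region that may need to grow
    sub: usize,          // "X": when X's value grows, this constraint is re-dirtied
    next: Option<usize>, // next constraint with the same `sub`; filled in below
}

// For each region, remember the first constraint whose `sub` is that region,
// and thread the remaining ones through `next`. Iterating in reverse keeps
// each list in forward (index) order.
fn build_dependency_map(constraints: &mut [Constraint], num_regions: usize) -> Vec<Option<usize>> {
    let mut map = vec![None; num_regions];
    for idx in (0..constraints.len()).rev() {
        let sub = constraints[idx].sub;
        constraints[idx].next = map[sub];
        map[sub] = Some(idx);
    }
    map
}

// Visit every constraint that must be re-propagated when `region`'s value
// grows, by walking the per-region list instead of looking up a Vec.
fn for_each_dependent(
    constraints: &[Constraint],
    map: &[Option<usize>],
    region: usize,
    mut f: impl FnMut(usize),
) {
    let mut cursor = map[region];
    while let Some(idx) = cursor {
        f(idx);
        cursor = constraints[idx].next;
    }
}

fn main() {
    // The example from the doc comment on `next` ('a = 0, 'b = 1, 'c = 2):
    //   0: 'a: 'b    1: 'b: 'c    2: 'c: 'b
    let mut constraints = vec![
        Constraint { sup: 0, sub: 1, next: None },
        Constraint { sup: 1, sub: 2, next: None },
        Constraint { sup: 2, sub: 1, next: None },
    ];
    let map = build_dependency_map(&mut constraints, 3);

    // Matches the table: constraint 0 is followed by constraint 2 (both have sub = 'b).
    assert_eq!(constraints[0].next, Some(2));
    assert_eq!(constraints[1].next, None);
    assert_eq!(constraints[2].next, None);
    assert_eq!(map, vec![None, Some(0), Some(1)]);

    // When 'b grows, constraints 0 and 2 must be re-dirtied, with no
    // per-lookup allocation as in the old HashMap-of-Vecs version.
    let mut dirty = Vec::new();
    for_each_dependent(&constraints, &map, 1, |idx| dirty.push(idx));
    assert_eq!(dirty, vec![0, 2]);
}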