Skip to content

Commit e1b36f5

Browse files
committed
Use DefPathHash instead of HirId to break cycles.
1 parent 0677edc commit e1b36f5

File tree

1 file changed

+8
-14
lines changed

1 file changed

+8
-14
lines changed

compiler/rustc_mir_transform/src/inline.rs

+8-14
Original file line number | Diff line number | Diff line change
@@ -1,7 +1,6 @@
11
//! Inlining pass for MIR functions
22
33
use rustc_attr::InlineAttr;
4-
use rustc_hir as hir;
54
use rustc_index::bit_set::BitSet;
65
use rustc_index::vec::Idx;
76
use rustc_middle::middle::codegen_fn_attrs::{CodegenFnAttrFlags, CodegenFnAttrs};
@@ -88,7 +87,6 @@ fn inline<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) -> bool {
8887
tcx,
8988
param_env,
9089
codegen_fn_attrs: tcx.codegen_fn_attrs(def_id),
91-
hir_id,
9290
history: Vec::new(),
9391
changed: false,
9492
};
@@ -102,8 +100,6 @@ struct Inliner<'tcx> {
102100
param_env: ParamEnv<'tcx>,
103101
/// Caller codegen attributes.
104102
codegen_fn_attrs: &'tcx CodegenFnAttrs,
105-
/// Caller HirID.
106-
hir_id: hir::HirId,
107103
/// Stack of inlined Instances.
108104
history: Vec<ty::Instance<'tcx>>,
109105
/// Indicates that the caller body has been modified.
@@ -179,7 +175,8 @@ impl<'tcx> Inliner<'tcx> {
179175
caller_body: &Body<'tcx>,
180176
callee: &Instance<'tcx>,
181177
) -> Result<(), &'static str> {
182-
if callee.def_id() == caller_body.source.def_id() {
178+
let caller_def_id = caller_body.source.def_id();
179+
if callee.def_id() == caller_def_id {
183180
return Err("self-recursion");
184181
}
185182

@@ -215,22 +212,19 @@ impl<'tcx> Inliner<'tcx> {
215212
}
216213

217214
if let Some(callee_def_id) = callee.def_id().as_local() {
218-
let callee_hir_id = self.tcx.hir().local_def_id_to_hir_id(callee_def_id);
219215
// Avoid a cycle here by using `instance_mir` only if we have
220-
// a lower `HirId` than the callee. This ensures that the callee will
221-
// not inline us. This trick only works without incremental compilation.
222-
// So don't do it if that is enabled.
223-
if !self.tcx.dep_graph.is_fully_enabled() && self.hir_id.index() < callee_hir_id.index()
216+
// a lower `DefPathHash` than the callee. This ensures that the callee will
217+
// not inline us. This trick even works with incremental compilation,
218+
// since `DefPathHash` is stable.
219+
if self.tcx.def_path_hash(caller_def_id)
220+
< self.tcx.def_path_hash(callee_def_id.to_def_id())
224221
{
225222
return Ok(());
226223
}
227224

228225
// If we know for sure that the function we're calling will itself try to
229226
// call us, then we avoid inlining that function.
230-
if self
231-
.tcx
232-
.mir_callgraph_reachable((*callee, caller_body.source.def_id().expect_local()))
233-
{
227+
if self.tcx.mir_callgraph_reachable((*callee, caller_def_id.expect_local())) {
234228
return Err("caller might be reachable from callee (query cycle avoidance)");
235229
}
236230

0 commit comments

Comments
 (0)