
Commit 65b0173

Committed Feb 6, 2024
Auto merge of #120711 - matthiaskrgr:rollup-g8n27fz, r=matthiaskrgr
Rollup of 9 pull requests

Successful merges:

- #120302 (various const interning cleanups)
- #120520 (Some cleanups around diagnostic levels.)
- #120521 (Make `NonZero` constructors generic.)
- #120527 (Switch OwnedStore handle count to AtomicU32)
- #120564 (coverage: Split out counter increment sites from BCB node/edge counters)
- #120575 (Simplify codegen diagnostic handling)
- #120597 (Suggest `[tail @ ..]` on `[..tail]` and `[...tail]` where `tail` is unresolved)
- #120609 (hir: Stop keeping prefixes for most of `use` list stems)
- #120633 (pattern_analysis: gather up place-relevant info)

r? `@ghost`
`@rustbot` modify labels: rollup
2 parents: 4a2fe44 + 00794ce

File tree

29 files changed: +703 −507 lines changed

 

compiler/rustc_ast_lowering/src/item.rs (+13 −6)

@@ -498,8 +498,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
                     }
                 }

-                let res =
-                    self.expect_full_res_from_use(id).map(|res| self.lower_res(res)).collect();
+                let res = self.lower_import_res(id, path.span);
                 let path = self.lower_use_path(res, &path, ParamMode::Explicit);
                 hir::ItemKind::Use(path, hir::UseKind::Single)
             }
@@ -535,7 +534,8 @@ impl<'hir> LoweringContext<'_, 'hir> {
                 // for that we return the `{}` import (called the
                 // `ListStem`).

-                let prefix = Path { segments, span: prefix.span.to(path.span), tokens: None };
+                let span = prefix.span.to(path.span);
+                let prefix = Path { segments, span, tokens: None };

                 // Add all the nested `PathListItem`s to the HIR.
                 for &(ref use_tree, id) in trees {
@@ -569,9 +569,16 @@ impl<'hir> LoweringContext<'_, 'hir> {
                     });
                 }

-                let res =
-                    self.expect_full_res_from_use(id).map(|res| self.lower_res(res)).collect();
-                let path = self.lower_use_path(res, &prefix, ParamMode::Explicit);
+                let path = if trees.is_empty() && !prefix.segments.is_empty() {
+                    // For empty lists we need to lower the prefix so it is checked for things
+                    // like stability later.
+                    let res = self.lower_import_res(id, span);
+                    self.lower_use_path(res, &prefix, ParamMode::Explicit)
+                } else {
+                    // For non-empty lists we can just drop all the data, the prefix is already
+                    // present in HIR as a part of nested imports.
+                    self.arena.alloc(hir::UsePath { res: smallvec![], segments: &[], span })
+                };
                 hir::ItemKind::Use(path, hir::UseKind::ListStem)
             }
         }
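Background for this change: a Rust name can live in the type, the value, and the macro namespace at the same time, so a single `use` item can resolve to as many as three distinct resolutions. That is the invariant behind the `SmallVec<[Res; 3]>` returned by the new `lower_import_res` in the next file. A minimal standalone illustration (hypothetical module and names, not rustc code):

mod m {
    pub struct S; // `S` is a type (type namespace) and also a
                  // unit-struct constructor (value namespace).
}

use m::S; // one import, two resolutions

fn main() {
    let _x: S = S; // used in type position and in value position
}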

compiler/rustc_ast_lowering/src/lib.rs (+9 −3)

@@ -64,7 +64,7 @@ use rustc_middle::ty::{ResolverAstLowering, TyCtxt};
 use rustc_session::parse::{add_feature_diagnostics, feature_err};
 use rustc_span::symbol::{kw, sym, Ident, Symbol};
 use rustc_span::{DesugaringKind, Span, DUMMY_SP};
-use smallvec::SmallVec;
+use smallvec::{smallvec, SmallVec};
 use std::collections::hash_map::Entry;
 use thin_vec::ThinVec;

@@ -750,8 +750,14 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
         self.resolver.get_partial_res(id).map_or(Res::Err, |pr| pr.expect_full_res())
     }

-    fn expect_full_res_from_use(&mut self, id: NodeId) -> impl Iterator<Item = Res<NodeId>> {
-        self.resolver.get_import_res(id).present_items()
+    fn lower_import_res(&mut self, id: NodeId, span: Span) -> SmallVec<[Res; 3]> {
+        let res = self.resolver.get_import_res(id).present_items();
+        let res: SmallVec<_> = res.map(|res| self.lower_res(res)).collect();
+        if res.is_empty() {
+            self.dcx().span_delayed_bug(span, "no resolution for an import");
+            return smallvec![Res::Err];
+        }
+        res
     }

     fn make_lang_item_qpath(&mut self, lang_item: hir::LangItem, span: Span) -> hir::QPath<'hir> {
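A self-contained sketch of the collect-or-fallback shape used by `lower_import_res`, built on the `smallvec` crate; the `Res` stand-in below is hypothetical and far simpler than rustc's. The `SmallVec<[Res; 3]>` keeps the common one-to-three-resolution case on the stack, and an empty result degrades to a `Res::Err` sentinel (rustc additionally files a delayed bug):

use smallvec::{smallvec, SmallVec};

#[derive(Debug, Clone, Copy, PartialEq)]
enum Res {
    Ok(u32), // hypothetical stand-in for a real resolution
    Err,     // sentinel for "resolution failed"
}

fn lower_import_res(resolutions: impl Iterator<Item = u32>) -> SmallVec<[Res; 3]> {
    let res: SmallVec<[Res; 3]> = resolutions.map(Res::Ok).collect();
    if res.is_empty() {
        // rustc calls `span_delayed_bug` here; the sketch just substitutes the sentinel.
        return smallvec![Res::Err];
    }
    res
}

fn main() {
    assert_eq!(lower_import_res(std::iter::empty()).as_slice(), &[Res::Err]);
    assert_eq!(lower_import_res([7].into_iter()).as_slice(), &[Res::Ok(7)]);
}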

compiler/rustc_ast_lowering/src/path.rs (+1 −0)

@@ -196,6 +196,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
         p: &Path,
         param_mode: ParamMode,
     ) -> &'hir hir::UsePath<'hir> {
+        assert!((1..=3).contains(&res.len()));
         self.arena.alloc(hir::UsePath {
             res,
             segments: self.arena.alloc_from_iter(p.segments.iter().map(|segment| {
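The assertion added here pins down the namespace invariant at the point where the `UsePath` is allocated: every import reaching `lower_use_path` carries between one and three resolutions, the empty case having already been replaced by a single `Res::Err` in `lower_import_res`.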

compiler/rustc_codegen_ssa/src/back/write.rs (+1 −1)

@@ -1810,7 +1810,7 @@ impl Translate for SharedEmitter {
 }

 impl Emitter for SharedEmitter {
-    fn emit_diagnostic(&mut self, diag: &rustc_errors::Diagnostic) {
+    fn emit_diagnostic(&mut self, diag: rustc_errors::Diagnostic) {
         let args: FxHashMap<DiagnosticArgName, DiagnosticArgValue> =
             diag.args().map(|(name, arg)| (name.clone(), arg.clone())).collect();
         drop(self.sender.send(SharedEmitterMessage::Diagnostic(Diagnostic {
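This is part of the diagnostic-handling simplification: `emit_diagnostic` now receives the `Diagnostic` by value, so emitters can move fields out instead of cloning them. A generic sketch of that ownership pattern with stand-in types (not rustc's real `Emitter` trait):

struct Diag {
    message: String,
    children: Vec<String>,
}

trait Emitter {
    // By-value parameter: implementations may consume the diagnostic.
    fn emit_diagnostic(&mut self, diag: Diag);
}

struct Collecting(Vec<String>);

impl Emitter for Collecting {
    fn emit_diagnostic(&mut self, mut diag: Diag) {
        self.0.push(diag.message); // field moved out, no clone
        self.0.append(&mut diag.children);
    }
}

fn main() {
    let mut e = Collecting(Vec::new());
    e.emit_diagnostic(Diag { message: "oops".into(), children: vec!["note".into()] });
    assert_eq!(e.0.len(), 2);
}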

compiler/rustc_const_eval/src/const_eval/eval_queries.rs (+2 −6)

@@ -1,5 +1,3 @@
-use std::mem;
-
 use either::{Left, Right};

 use rustc_hir::def::DefKind;
@@ -24,12 +22,13 @@ use crate::interpret::{
 };

 // Returns a pointer to where the result lives
+#[instrument(level = "trace", skip(ecx, body), ret)]
 fn eval_body_using_ecx<'mir, 'tcx>(
     ecx: &mut CompileTimeEvalContext<'mir, 'tcx>,
     cid: GlobalId<'tcx>,
     body: &'mir mir::Body<'tcx>,
 ) -> InterpResult<'tcx, MPlaceTy<'tcx>> {
-    debug!("eval_body_using_ecx: {:?}, {:?}", cid, ecx.param_env);
+    trace!(?ecx.param_env);
     let tcx = *ecx.tcx;
     assert!(
         cid.promoted.is_some()
@@ -75,11 +74,8 @@ fn eval_body_using_ecx<'mir, 'tcx>(
             None => InternKind::Constant,
         }
     };
-    let check_alignment = mem::replace(&mut ecx.machine.check_alignment, CheckAlignment::No); // interning doesn't need to respect alignment
     intern_const_alloc_recursive(ecx, intern_kind, &ret)?;
-    ecx.machine.check_alignment = check_alignment;

-    debug!("eval_body_using_ecx done: {:?}", ret);
     Ok(ret)
 }
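The deleted `debug!` lines are subsumed by `tracing`'s `#[instrument]` attribute: it opens a span that records the non-skipped arguments on entry and, with `ret`, logs the return value on exit. A standalone sketch of the same pattern (the function and its fields are hypothetical; assumes the `tracing` and `tracing-subscriber` crates):

use tracing::{instrument, trace};

// Small arguments like `id` are captured by the span automatically;
// large or noisy ones go in `skip(...)`.
#[instrument(level = "trace", skip(body), ret)]
fn eval_one(id: u32, body: &[u8]) -> usize {
    trace!(len = body.len()); // extra fields can still be logged manually
    body.len() + id as usize
}

fn main() {
    tracing_subscriber::fmt().with_max_level(tracing::Level::TRACE).init();
    eval_one(7, &[1, 2, 3]);
}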

compiler/rustc_const_eval/src/interpret/intern.rs (+59 −57)

@@ -41,13 +41,12 @@ pub trait CompileTimeMachine<'mir, 'tcx: 'mir, T> = Machine<
 /// allocation is interned immutably; if it is `Mutability::Mut`, then the allocation *must be*
 /// already mutable (as a sanity check).
 ///
-/// `recursive_alloc` is called for all recursively encountered allocations.
+/// Returns an iterator over all relocations referred to by this allocation.
 fn intern_shallow<'rt, 'mir, 'tcx, T, M: CompileTimeMachine<'mir, 'tcx, T>>(
     ecx: &'rt mut InterpCx<'mir, 'tcx, M>,
     alloc_id: AllocId,
     mutability: Mutability,
-    mut recursive_alloc: impl FnMut(&InterpCx<'mir, 'tcx, M>, CtfeProvenance),
-) -> Result<(), ()> {
+) -> Result<impl Iterator<Item = CtfeProvenance> + 'tcx, ()> {
     trace!("intern_shallow {:?}", alloc_id);
     // remove allocation
     let Some((_kind, mut alloc)) = ecx.memory.alloc_map.remove(&alloc_id) else {
@@ -65,14 +64,10 @@ fn intern_shallow<'rt, 'mir, 'tcx, T, M: CompileTimeMachine<'mir, 'tcx, T>>(
             assert_eq!(alloc.mutability, Mutability::Mut);
         }
     }
-    // record child allocations
-    for &(_, prov) in alloc.provenance().ptrs().iter() {
-        recursive_alloc(ecx, prov);
-    }
     // link the alloc id to the actual allocation
     let alloc = ecx.tcx.mk_const_alloc(alloc);
     ecx.tcx.set_alloc_id_memory(alloc_id, alloc);
-    Ok(())
+    Ok(alloc.0.0.provenance().ptrs().iter().map(|&(_, prov)| prov))
 }

 /// How a constant value should be interned.
@@ -128,12 +123,16 @@ pub fn intern_const_alloc_recursive<
         }
     };

-    // Initialize recursive interning.
+    // Intern the base allocation, and initialize todo list for recursive interning.
     let base_alloc_id = ret.ptr().provenance.unwrap().alloc_id();
-    let mut todo = vec![(base_alloc_id, base_mutability)];
+    // First we intern the base allocation, as it requires a different mutability.
+    // This gives us the initial set of nested allocations, which will then all be processed
+    // recursively in the loop below.
+    let mut todo: Vec<_> =
+        intern_shallow(ecx, base_alloc_id, base_mutability).unwrap().map(|prov| prov).collect();
     // We need to distinguish "has just been interned" from "was already in `tcx`",
     // so we track this in a separate set.
-    let mut just_interned = FxHashSet::default();
+    let mut just_interned: FxHashSet<_> = std::iter::once(base_alloc_id).collect();
     // Whether we encountered a bad mutable pointer.
     // We want to first report "dangling" and then "mutable", so we need to delay reporting these
     // errors.
@@ -147,52 +146,56 @@ pub fn intern_const_alloc_recursive<
     // raw pointers, so we cannot rely on validation to catch them -- and since interning runs
     // before validation, and interning doesn't know the type of anything, this means we can't show
     // better errors. Maybe we should consider doing validation before interning in the future.
-    while let Some((alloc_id, mutability)) = todo.pop() {
+    while let Some(prov) = todo.pop() {
+        let alloc_id = prov.alloc_id();
+        // Crucially, we check this *before* checking whether the `alloc_id`
+        // has already been interned. The point of this check is to ensure that when
+        // there are multiple pointers to the same allocation, they are *all* immutable.
+        // Therefore it would be bad if we only checked the first pointer to any given
+        // allocation.
+        // (It is likely not possible to actually have multiple pointers to the same allocation,
+        // so alternatively we could also check that and ICE if there are multiple such pointers.)
+        if intern_kind != InternKind::Promoted
+            && inner_mutability == Mutability::Not
+            && !prov.immutable()
+        {
+            if ecx.tcx.try_get_global_alloc(alloc_id).is_some()
+                && !just_interned.contains(&alloc_id)
+            {
+                // This is a pointer to some memory from another constant. We encounter mutable
+                // pointers to such memory since we do not always track immutability through
+                // these "global" pointers. Allowing them is harmless; the point of these checks
+                // during interning is to justify why we intern the *new* allocations immutably,
+                // so we can completely ignore existing allocations. We also don't need to add
+                // this to the todo list, since after all it is already interned.
+                continue;
+            }
+            // Found a mutable pointer inside a const where inner allocations should be
+            // immutable. We exclude promoteds from this, since things like `&mut []` and
+            // `&None::<Cell<i32>>` lead to promotion that can produce mutable pointers. We rely
+            // on the promotion analysis not screwing up to ensure that it is sound to intern
+            // promoteds as immutable.
+            found_bad_mutable_pointer = true;
+        }
         if ecx.tcx.try_get_global_alloc(alloc_id).is_some() {
             // Already interned.
             debug_assert!(!ecx.memory.alloc_map.contains_key(&alloc_id));
             continue;
         }
         just_interned.insert(alloc_id);
-        intern_shallow(ecx, alloc_id, mutability, |ecx, prov| {
-            let alloc_id = prov.alloc_id();
-            if intern_kind != InternKind::Promoted
-                && inner_mutability == Mutability::Not
-                && !prov.immutable()
-            {
-                if ecx.tcx.try_get_global_alloc(alloc_id).is_some()
-                    && !just_interned.contains(&alloc_id)
-                {
-                    // This is a pointer to some memory from another constant. We encounter mutable
-                    // pointers to such memory since we do not always track immutability through
-                    // these "global" pointers. Allowing them is harmless; the point of these checks
-                    // during interning is to justify why we intern the *new* allocations immutably,
-                    // so we can completely ignore existing allocations. We also don't need to add
-                    // this to the todo list, since after all it is already interned.
-                    return;
-                }
-                // Found a mutable pointer inside a const where inner allocations should be
-                // immutable. We exclude promoteds from this, since things like `&mut []` and
-                // `&None::<Cell<i32>>` lead to promotion that can produce mutable pointers. We rely
-                // on the promotion analysis not screwing up to ensure that it is sound to intern
-                // promoteds as immutable.
-                found_bad_mutable_pointer = true;
-            }
-            // We always intern with `inner_mutability`, and furthermore we ensured above that if
-            // that is "immutable", then there are *no* mutable pointers anywhere in the newly
-            // interned memory -- justifying that we can indeed intern immutably. However this also
-            // means we can *not* easily intern immutably here if `prov.immutable()` is true and
-            // `inner_mutability` is `Mut`: there might be other pointers to that allocation, and
-            // we'd have to somehow check that they are *all* immutable before deciding that this
-            // allocation can be made immutable. In the future we could consider analyzing all
-            // pointers before deciding which allocations can be made immutable; but for now we are
-            // okay with losing some potential for immutability here. This can anyway only affect
-            // `static mut`.
-            todo.push((alloc_id, inner_mutability));
-        })
-        .map_err(|()| {
+        // We always intern with `inner_mutability`, and furthermore we ensured above that if
+        // that is "immutable", then there are *no* mutable pointers anywhere in the newly
+        // interned memory -- justifying that we can indeed intern immutably. However this also
+        // means we can *not* easily intern immutably here if `prov.immutable()` is true and
+        // `inner_mutability` is `Mut`: there might be other pointers to that allocation, and
+        // we'd have to somehow check that they are *all* immutable before deciding that this
+        // allocation can be made immutable. In the future we could consider analyzing all
+        // pointers before deciding which allocations can be made immutable; but for now we are
+        // okay with losing some potential for immutability here. This can anyway only affect
        // `static mut`.
+        todo.extend(intern_shallow(ecx, alloc_id, inner_mutability).map_err(|()| {
             ecx.tcx.dcx().emit_err(DanglingPtrInFinal { span: ecx.tcx.span, kind: intern_kind })
-        })?;
+        })?);
     }
     if found_bad_mutable_pointer {
         return Err(ecx
@@ -220,13 +223,13 @@ pub fn intern_const_alloc_for_constprop<
         return Ok(());
     }
     // Move allocation to `tcx`.
-    intern_shallow(ecx, alloc_id, Mutability::Not, |_ecx, _| {
+    for _ in intern_shallow(ecx, alloc_id, Mutability::Not).map_err(|()| err_ub!(DeadLocal))? {
         // We are not doing recursive interning, so we don't currently support provenance.
         // (If this assertion ever triggers, we should just implement a
         // proper recursive interning loop -- or just call `intern_const_alloc_recursive`.
         panic!("`intern_const_alloc_for_constprop` called on allocation with nested provenance")
-    })
-    .map_err(|()| err_ub!(DeadLocal).into())
+    }
+    Ok(())
 }

 impl<'mir, 'tcx: 'mir, M: super::intern::CompileTimeMachine<'mir, 'tcx, !>>
@@ -247,15 +250,14 @@ impl<'mir, 'tcx: 'mir, M: super::intern::CompileTimeMachine<'mir, 'tcx, !>>
         let dest = self.allocate(layout, MemoryKind::Stack)?;
         f(self, &dest.clone().into())?;
         let alloc_id = dest.ptr().provenance.unwrap().alloc_id(); // this was just allocated, it must have provenance
-        intern_shallow(self, alloc_id, Mutability::Not, |ecx, prov| {
+        for prov in intern_shallow(self, alloc_id, Mutability::Not).unwrap() {
             // We are not doing recursive interning, so we don't currently support provenance.
             // (If this assertion ever triggers, we should just implement a
             // proper recursive interning loop -- or just call `intern_const_alloc_recursive`.
-            if !ecx.tcx.try_get_global_alloc(prov.alloc_id()).is_some() {
+            if !self.tcx.try_get_global_alloc(prov.alloc_id()).is_some() {
                 panic!("`intern_with_temp_alloc` with nested allocations");
             }
-        })
-        .unwrap();
+        }
         Ok(alloc_id)
     }
 }
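The central refactor in this file turns `intern_shallow` from a callback-driven API (`recursive_alloc: impl FnMut(...)`) into one that returns an iterator of nested provenances, so the caller drives recursion through an explicit worklist; that also avoids a closure that would have to capture `ecx`, `todo`, and `just_interned` all at once. The same shape in a generic, self-contained sketch (the node type and child function are hypothetical):

use std::collections::HashSet;

type Node = u32;

// Stand-in for `intern_shallow`: process one node and *return* its
// children instead of invoking a callback on each of them.
fn process_shallow(node: Node) -> impl Iterator<Item = Node> {
    (0..node % 3).map(move |i| node / 2 + i)
}

fn process_recursive(root: Node) {
    // Seed the worklist from the root, as the new code seeds `todo`
    // from the base allocation.
    let mut seen: HashSet<Node> = std::iter::once(root).collect();
    let mut todo: Vec<Node> = process_shallow(root).collect();
    while let Some(node) = todo.pop() {
        if !seen.insert(node) {
            continue; // already processed, mirrors the `try_get_global_alloc` check
        }
        todo.extend(process_shallow(node));
    }
}

fn main() {
    process_recursive(8);
}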

compiler/rustc_error_messages/src/lib.rs (+1 −1)

@@ -378,7 +378,7 @@ impl From<Cow<'static, str>> for DiagnosticMessage {
     }
 }

-/// A workaround for "good path" ICEs when formatting types in disabled lints.
+/// A workaround for good_path_delayed_bug ICEs when formatting types in disabled lints.
 ///
 /// Delays formatting until `.into(): DiagnosticMessage` is used.
 pub struct DelayDm<F>(pub F);

compiler/rustc_errors/src/annotate_snippet_emitter_writer.rs (+13 −9)

@@ -44,15 +44,15 @@ impl Translate for AnnotateSnippetEmitter {

 impl Emitter for AnnotateSnippetEmitter {
     /// The entry point for the diagnostics generation
-    fn emit_diagnostic(&mut self, diag: &Diagnostic) {
+    fn emit_diagnostic(&mut self, mut diag: Diagnostic) {
         let fluent_args = to_fluent_args(diag.args());

-        let mut children = diag.children.clone();
-        let (mut primary_span, suggestions) = self.primary_span_formatted(diag, &fluent_args);
+        let mut suggestions = diag.suggestions.unwrap_or(vec![]);
+        self.primary_span_formatted(&mut diag.span, &mut suggestions, &fluent_args);

         self.fix_multispans_in_extern_macros_and_render_macro_backtrace(
-            &mut primary_span,
-            &mut children,
+            &mut diag.span,
+            &mut diag.children,
             &diag.level,
             self.macro_backtrace,
         );
@@ -62,9 +62,9 @@ impl Emitter for AnnotateSnippetEmitter {
             &diag.messages,
             &fluent_args,
             &diag.code,
-            &primary_span,
-            &children,
-            suggestions,
+            &diag.span,
+            &diag.children,
+            &suggestions,
         );
     }

@@ -85,7 +85,11 @@ fn source_string(file: Lrc<SourceFile>, line: &Line) -> String {
 /// Maps `Diagnostic::Level` to `snippet::AnnotationType`
 fn annotation_type_for_level(level: Level) -> AnnotationType {
     match level {
-        Level::Bug | Level::DelayedBug(_) | Level::Fatal | Level::Error => AnnotationType::Error,
+        Level::Bug
+        | Level::Fatal
+        | Level::Error
+        | Level::DelayedBug
+        | Level::GoodPathDelayedBug => AnnotationType::Error,
         Level::ForceWarning(_) | Level::Warning => AnnotationType::Warning,
         Level::Note | Level::OnceNote => AnnotationType::Note,
         Level::Help | Level::OnceHelp => AnnotationType::Help,
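Two of the rollup's PRs meet in this file: because `emit_diagnostic` now owns its `Diagnostic`, the emitter can take `diag.suggestions` and mutate `diag.span` and `diag.children` in place rather than cloning them; and the reworked `Level` enum (a unit `DelayedBug` plus the new `GoodPathDelayedBug` variant) forces exhaustive matches such as `annotation_type_for_level` to be updated.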
