Commit 29a5872

Auto merge of #144360 - matthiaskrgr:rollup-b6ej0mm, r=matthiaskrgr
Rollup of 9 pull requests

Successful merges:

 - #144173 (Remove tidy checks for `tests/ui/issues/`)
 - #144234 (Fix broken TLS destructors on 32-bit win7)
 - #144239 (Clean `rustc/parse/src/lexer` to improve maintainability)
 - #144256 (Don't ICE on non-TypeId metadata within TypeId)
 - #144290 (update tests/ui/SUMMARY.md)
 - #144292 (mbe: Use concrete type for `get_unused_rule`)
 - #144298 (coverage: Enlarge empty spans during MIR instrumentation, not codegen)
 - #144311 (Add powerpc64le-unknown-linux-musl to CI rustc targets)
 - #144315 (bootstrap: add package.json and package-lock.json to dist tarball)

r? `@ghost`
`@rustbot` modify labels: rollup
2 parents 4ff3fa0 + d444815 · commit 29a5872

25 files changed: +221, -1428 lines changed


compiler/rustc_codegen_llvm/src/coverageinfo/mapgen/spans.rs

Lines changed: 4 additions & 24 deletions
@@ -39,7 +39,10 @@ impl Coords {
 /// or other expansions), and if it does happen then skipping a span or function is
 /// better than an ICE or `llvm-cov` failure that the user might have no way to avoid.
 pub(crate) fn make_coords(source_map: &SourceMap, file: &SourceFile, span: Span) -> Option<Coords> {
-    let span = ensure_non_empty_span(source_map, span)?;
+    if span.is_empty() {
+        debug_assert!(false, "can't make coords from empty span: {span:?}");
+        return None;
+    }

     let lo = span.lo();
     let hi = span.hi();
@@ -70,29 +73,6 @@ pub(crate) fn make_coords(source_map: &SourceMap, file: &SourceFile, span: Span)
     })
 }

-fn ensure_non_empty_span(source_map: &SourceMap, span: Span) -> Option<Span> {
-    if !span.is_empty() {
-        return Some(span);
-    }
-
-    // The span is empty, so try to enlarge it to cover an adjacent '{' or '}'.
-    source_map
-        .span_to_source(span, |src, start, end| try {
-            // Adjusting span endpoints by `BytePos(1)` is normally a bug,
-            // but in this case we have specifically checked that the character
-            // we're skipping over is one of two specific ASCII characters, so
-            // adjusting by exactly 1 byte is correct.
-            if src.as_bytes().get(end).copied() == Some(b'{') {
-                Some(span.with_hi(span.hi() + BytePos(1)))
-            } else if start > 0 && src.as_bytes()[start - 1] == b'}' {
-                Some(span.with_lo(span.lo() - BytePos(1)))
-            } else {
-                None
-            }
-        })
-        .ok()?
-}
-
 /// If `llvm-cov` sees a source region that is improperly ordered (end < start),
 /// it will immediately exit with a fatal error. To prevent that from happening,
 /// discard regions that are improperly ordered, or might be interpreted in a

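The change above replaces span repair with an invariant check: an empty span now trips a debug assertion and is skipped, instead of being silently enlarged at codegen time. A minimal standalone sketch of that "debug-assert, then bail" shape, using a toy function rather than rustc's Coords machinery:

fn describe_range(lo: u32, hi: u32) -> Option<String> {
    if lo == hi {
        // Invariant violation: loud under debug assertions, silently skipped in release.
        debug_assert!(false, "can't describe an empty range: {lo}..{hi}");
        return None;
    }
    Some(format!("{lo}..{hi}"))
}

fn main() {
    assert_eq!(describe_range(3, 5).as_deref(), Some("3..5"));
    // `describe_range(3, 3)` panics under debug assertions and returns `None` in release builds.
}

Under debug assertions the violated invariant is loud; in release builds the caller simply gets `None` and skips the item, matching the doc comment's preference for a skipped span over an ICE.
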
compiler/rustc_const_eval/src/interpret/memory.rs

Lines changed: 1 addition & 1 deletion
@@ -1000,7 +1000,7 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {
         ptr: Pointer<Option<M::Provenance>>,
     ) -> InterpResult<'tcx, (Ty<'tcx>, u64)> {
         let (alloc_id, offset, _meta) = self.ptr_get_alloc_id(ptr, 0)?;
-        let GlobalAlloc::TypeId { ty } = self.tcx.global_alloc(alloc_id) else {
+        let Some(GlobalAlloc::TypeId { ty }) = self.tcx.try_get_global_alloc(alloc_id) else {
             throw_ub_format!("invalid `TypeId` value: not all bytes carry type id metadata")
         };
         interp_ok((ty, offset.bytes()))

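The one-line change above swaps an infallible lookup (`global_alloc`, which panics on an unknown allocation ID) for a fallible one and routes the failure into the existing UB report, so a forged or dangling ID no longer ICEs the interpreter. A minimal sketch of that let-else shape on toy types; the `HashMap` and all names below are stand-ins, not rustc's allocation table:

use std::collections::HashMap;

enum Alloc {
    TypeId { ty: &'static str },
    Memory,
}

fn type_of_alloc(allocs: &HashMap<u64, Alloc>, id: u64) -> Result<&'static str, String> {
    // `get` is the fallible counterpart of a lookup that would panic on unknown IDs.
    let Some(&Alloc::TypeId { ty }) = allocs.get(&id) else {
        return Err(format!("invalid `TypeId` value: allocation {id} carries no type id metadata"));
    };
    Ok(ty)
}

fn main() {
    let mut allocs = HashMap::new();
    allocs.insert(1, Alloc::TypeId { ty: "u32" });
    allocs.insert(2, Alloc::Memory);

    assert_eq!(type_of_alloc(&allocs, 1), Ok("u32"));
    assert!(type_of_alloc(&allocs, 2).is_err()); // wrong kind of allocation
    assert!(type_of_alloc(&allocs, 9).is_err()); // unknown id: an error, not a panic
}
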
compiler/rustc_expand/src/base.rs

Lines changed: 3 additions & 6 deletions
@@ -1,3 +1,4 @@
+use std::any::Any;
 use std::default::Default;
 use std::iter;
 use std::path::Component::Prefix;
@@ -361,25 +362,21 @@ where
 }

 /// Represents a thing that maps token trees to Macro Results
-pub trait TTMacroExpander {
+pub trait TTMacroExpander: Any {
     fn expand<'cx>(
         &self,
         ecx: &'cx mut ExtCtxt<'_>,
         span: Span,
         input: TokenStream,
     ) -> MacroExpanderResult<'cx>;
-
-    fn get_unused_rule(&self, _rule_i: usize) -> Option<(&Ident, Span)> {
-        None
-    }
 }

 pub type MacroExpanderResult<'cx> = ExpandResult<Box<dyn MacResult + 'cx>, ()>;

 pub type MacroExpanderFn =
     for<'cx> fn(&'cx mut ExtCtxt<'_>, Span, TokenStream) -> MacroExpanderResult<'cx>;

-impl<F> TTMacroExpander for F
+impl<F: 'static> TTMacroExpander for F
 where
     F: for<'cx> Fn(&'cx mut ExtCtxt<'_>, Span, TokenStream) -> MacroExpanderResult<'cx>,
 {

compiler/rustc_expand/src/lib.rs

Lines changed: 1 addition & 1 deletion
@@ -22,7 +22,7 @@ mod placeholders;
 mod proc_macro_server;
 mod stats;

-pub use mbe::macro_rules::compile_declarative_macro;
+pub use mbe::macro_rules::{MacroRulesMacroExpander, compile_declarative_macro};
 pub mod base;
 pub mod config;
 pub mod expand;

compiler/rustc_expand/src/mbe/macro_rules.rs

Lines changed: 9 additions & 7 deletions
@@ -128,14 +128,22 @@ pub(super) struct MacroRule {
     rhs: mbe::TokenTree,
 }

-struct MacroRulesMacroExpander {
+pub struct MacroRulesMacroExpander {
     node_id: NodeId,
     name: Ident,
     span: Span,
     transparency: Transparency,
     rules: Vec<MacroRule>,
 }

+impl MacroRulesMacroExpander {
+    pub fn get_unused_rule(&self, rule_i: usize) -> Option<(&Ident, Span)> {
+        // If the rhs contains an invocation like `compile_error!`, don't report it as unused.
+        let rule = &self.rules[rule_i];
+        if has_compile_error_macro(&rule.rhs) { None } else { Some((&self.name, rule.lhs_span)) }
+    }
+}
+
 impl TTMacroExpander for MacroRulesMacroExpander {
     fn expand<'cx>(
         &self,
@@ -154,12 +162,6 @@ impl TTMacroExpander for MacroRulesMacroExpander {
             &self.rules,
         ))
     }
-
-    fn get_unused_rule(&self, rule_i: usize) -> Option<(&Ident, Span)> {
-        // If the rhs contains an invocation like `compile_error!`, don't report it as unused.
-        let rule = &self.rules[rule_i];
-        if has_compile_error_macro(&rule.rhs) { None } else { Some((&self.name, rule.lhs_span)) }
-    }
 }

 struct DummyExpander(ErrorGuaranteed);

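Together with the `TTMacroExpander: Any` bound added in `base.rs`, moving `get_unused_rule` onto the concrete `MacroRulesMacroExpander` presumably means callers downcast the trait object when they need rule-level details. A minimal sketch of that pattern with toy types (not rustc's real call sites); it relies on trait upcasting from `dyn Expander` to `dyn Any`, stable since Rust 1.86:

use std::any::Any;

trait Expander: Any {
    fn expand(&self) -> String;
}

struct RulesExpander {
    name: String,
}

impl Expander for RulesExpander {
    fn expand(&self) -> String {
        format!("expanding {}", self.name)
    }
}

impl RulesExpander {
    // Concrete-only method, standing in for `get_unused_rule`.
    fn unused_rule_name(&self) -> &str {
        &self.name
    }
}

fn report_unused(expander: &dyn Expander) {
    // Upcast the trait object to `dyn Any`, then try the concrete type.
    let any: &dyn Any = expander;
    if let Some(rules) = any.downcast_ref::<RulesExpander>() {
        println!("unused rule in `{}`", rules.unused_rule_name());
    }
}

fn main() {
    let expander = RulesExpander { name: "m".to_string() };
    println!("{}", expander.expand());
    report_unused(&expander);
}

The trade-off is that the `dyn TTMacroExpander` surface stays minimal, while the unused-rule check can still reach the concrete type when it needs to.
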
compiler/rustc_mir_transform/src/coverage/spans.rs

Lines changed: 36 additions & 2 deletions
@@ -1,7 +1,8 @@
 use rustc_data_structures::fx::FxHashSet;
 use rustc_middle::mir;
 use rustc_middle::ty::TyCtxt;
-use rustc_span::{DesugaringKind, ExpnKind, MacroKind, Span};
+use rustc_span::source_map::SourceMap;
+use rustc_span::{BytePos, DesugaringKind, ExpnKind, MacroKind, Span};
 use tracing::instrument;

 use crate::coverage::graph::{BasicCoverageBlock, CoverageGraph};
@@ -83,8 +84,18 @@ pub(super) fn extract_refined_covspans<'tcx>(
     // Discard any span that overlaps with a hole.
     discard_spans_overlapping_holes(&mut covspans, &holes);

-    // Perform more refinement steps after holes have been dealt with.
+    // Discard spans that overlap in unwanted ways.
     let mut covspans = remove_unwanted_overlapping_spans(covspans);
+
+    // For all empty spans, either enlarge them to be non-empty, or discard them.
+    let source_map = tcx.sess.source_map();
+    covspans.retain_mut(|covspan| {
+        let Some(span) = ensure_non_empty_span(source_map, covspan.span) else { return false };
+        covspan.span = span;
+        true
+    });
+
+    // Merge covspans that can be merged.
     covspans.dedup_by(|b, a| a.merge_if_eligible(b));

     code_mappings.extend(covspans.into_iter().map(|Covspan { span, bcb }| {
@@ -230,3 +241,26 @@ fn compare_spans(a: Span, b: Span) -> std::cmp::Ordering {
         // - Both have the same start and span A extends further right
         .then_with(|| Ord::cmp(&a.hi(), &b.hi()).reverse())
 }
+
+fn ensure_non_empty_span(source_map: &SourceMap, span: Span) -> Option<Span> {
+    if !span.is_empty() {
+        return Some(span);
+    }
+
+    // The span is empty, so try to enlarge it to cover an adjacent '{' or '}'.
+    source_map
+        .span_to_source(span, |src, start, end| try {
+            // Adjusting span endpoints by `BytePos(1)` is normally a bug,
+            // but in this case we have specifically checked that the character
+            // we're skipping over is one of two specific ASCII characters, so
+            // adjusting by exactly 1 byte is correct.
+            if src.as_bytes().get(end).copied() == Some(b'{') {
+                Some(span.with_hi(span.hi() + BytePos(1)))
+            } else if start > 0 && src.as_bytes()[start - 1] == b'}' {
+                Some(span.with_lo(span.lo() - BytePos(1)))
+            } else {
+                None
+            }
+        })
+        .ok()?
+}

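The `retain_mut` loop added above enlarges empty spans or drops them in a single pass, at MIR instrumentation time rather than codegen. A self-contained sketch of that enlarge-or-discard shape on plain `(lo, hi)` pairs instead of rustc's `Covspan`/`SourceMap` types:

fn main() {
    // (lo, hi) pairs; a pair with lo == hi models an empty span.
    let mut spans: Vec<(u32, u32)> = vec![(3, 3), (5, 9), (12, 12)];
    let file_len = 20;

    // For all empty spans, either enlarge them to be non-empty, or discard them.
    spans.retain_mut(|(lo, hi)| {
        if lo == hi {
            if *hi < file_len {
                *hi += 1; // enlarge by one position
                true
            } else {
                false // can't enlarge: drop it
            }
        } else {
            true
        }
    });

    assert_eq!(spans, vec![(3, 4), (5, 9), (12, 13)]);
}
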
compiler/rustc_parse/src/lexer/diagnostics.rs

Lines changed: 25 additions & 19 deletions
@@ -126,23 +126,29 @@ pub(super) fn report_suspicious_mismatch_block(
     }
 }

-pub(crate) fn make_unclosed_delims_error(
-    unmatched: UnmatchedDelim,
-    psess: &ParseSess,
-) -> Option<Diag<'_>> {
-    // `None` here means an `Eof` was found. We already emit those errors elsewhere, we add them to
-    // `unmatched_delims` only for error recovery in the `Parser`.
-    let found_delim = unmatched.found_delim?;
-    let mut spans = vec![unmatched.found_span];
-    if let Some(sp) = unmatched.unclosed_span {
-        spans.push(sp);
-    };
-    let err = psess.dcx().create_err(MismatchedClosingDelimiter {
-        spans,
-        delimiter: pprust::token_kind_to_string(&found_delim.as_close_token_kind()).to_string(),
-        unmatched: unmatched.found_span,
-        opening_candidate: unmatched.candidate_span,
-        unclosed: unmatched.unclosed_span,
-    });
-    Some(err)
+pub(crate) fn make_errors_for_mismatched_closing_delims<'psess>(
+    unmatcheds: &[UnmatchedDelim],
+    psess: &'psess ParseSess,
+) -> Vec<Diag<'psess>> {
+    unmatcheds
+        .iter()
+        .filter_map(|unmatched| {
+            // `None` here means an `Eof` was found. We already emit those errors elsewhere, we add them to
+            // `unmatched_delims` only for error recovery in the `Parser`.
+            let found_delim = unmatched.found_delim?;
+            let mut spans = vec![unmatched.found_span];
+            if let Some(sp) = unmatched.unclosed_span {
+                spans.push(sp);
+            };
+            let err = psess.dcx().create_err(MismatchedClosingDelimiter {
+                spans,
+                delimiter: pprust::token_kind_to_string(&found_delim.as_close_token_kind())
+                    .to_string(),
+                unmatched: unmatched.found_span,
+                opening_candidate: unmatched.candidate_span,
+                unclosed: unmatched.unclosed_span,
+            });
+            Some(err)
+        })
+        .collect()
 }

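The refactor above folds the per-delimiter helper (which returned `Option<Diag>`) into one batch function returning `Vec<Diag>`, so the lexer's call site shrinks to a single call. A toy sketch of the same shape, with strings standing in for `Diag` and a small struct standing in for `UnmatchedDelim`:

struct Unmatched {
    found: Option<char>, // `None` models the EOF case that is reported elsewhere
    line: usize,
}

fn make_errors_for_mismatched_closing_delims(unmatcheds: &[Unmatched]) -> Vec<String> {
    unmatcheds
        .iter()
        .filter_map(|u| {
            let found = u.found?; // skip EOF entries, as the real function does
            Some(format!("mismatched closing delimiter `{found}` on line {}", u.line))
        })
        .collect()
}

fn main() {
    let unmatcheds = vec![
        Unmatched { found: Some(']'), line: 3 },
        Unmatched { found: None, line: 9 },
    ];
    let errors = make_errors_for_mismatched_closing_delims(&unmatcheds);
    assert_eq!(errors.len(), 1);
    println!("{}", errors[0]);
}
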
compiler/rustc_parse/src/lexer/mod.rs

Lines changed: 7 additions & 11 deletions
@@ -1,4 +1,4 @@
-use diagnostics::make_unclosed_delims_error;
+use diagnostics::make_errors_for_mismatched_closing_delims;
 use rustc_ast::ast::{self, AttrStyle};
 use rustc_ast::token::{self, CommentKind, Delimiter, IdentIsRaw, Token, TokenKind};
 use rustc_ast::tokenstream::TokenStream;
@@ -71,27 +71,23 @@ pub(crate) fn lex_token_trees<'psess, 'src>(
     };
     let res = lexer.lex_token_trees(/* is_delimited */ false);

-    let mut unmatched_delims: Vec<_> = lexer
-        .diag_info
-        .unmatched_delims
-        .into_iter()
-        .filter_map(|unmatched_delim| make_unclosed_delims_error(unmatched_delim, psess))
-        .collect();
+    let mut unmatched_closing_delims: Vec<_> =
+        make_errors_for_mismatched_closing_delims(&lexer.diag_info.unmatched_delims, psess);

     match res {
         Ok((_open_spacing, stream)) => {
-            if unmatched_delims.is_empty() {
+            if unmatched_closing_delims.is_empty() {
                 Ok(stream)
             } else {
                 // Return error if there are unmatched delimiters or unclosed delimiters.
-                Err(unmatched_delims)
+                Err(unmatched_closing_delims)
             }
         }
         Err(errs) => {
             // We emit delimiter mismatch errors first, then emit the unclosing delimiter mismatch
             // because the delimiter mismatch is more likely to be the root cause of error
-            unmatched_delims.extend(errs);
-            Err(unmatched_delims)
+            unmatched_closing_delims.extend(errs);
+            Err(unmatched_closing_delims)
         }
     }
 }
