Clean: rename open_braces to open_delimiters in lexer and move make_unclosed_delims_error into diagnostics.rs. #140147

Merged: 2 commits, Apr 23, 2025
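
For context, a minimal sketch (not part of this PR, and deliberately malformed) of the kind of source that exercises the unclosed/mismatched-delimiter diagnostics this change touches; the exact rustc labels may differ from the comments below.

    fn main() {
        let xs = (1, 2, 3;   // `(` is opened here but never closed...
    }                        // ...so this `}` is reported as a mismatched closing delimiter,
                             // and the unclosed `(` is recorded as the opening candidate.
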
28 changes: 26 additions & 2 deletions compiler/rustc_parse/src/lexer/diagnostics.rs
@@ -1,14 +1,17 @@
use rustc_ast::token::Delimiter;
use rustc_errors::Diag;
use rustc_session::parse::ParseSess;
use rustc_span::Span;
use rustc_span::source_map::SourceMap;

use super::UnmatchedDelim;
use crate::errors::MismatchedClosingDelimiter;
use crate::pprust;

#[derive(Default)]
pub(super) struct TokenTreeDiagInfo {
/// Stack of open delimiters and their spans. Used for error message.
pub open_braces: Vec<(Delimiter, Span)>,
pub open_delimiters: Vec<(Delimiter, Span)>,
pub unmatched_delims: Vec<UnmatchedDelim>,

/// Used only for error recovery when arriving to EOF with mismatched braces.
@@ -108,11 +111,32 @@ pub(super) fn report_suspicious_mismatch_block(
} else {
// If there is no suspicious span, give the last properly closed block may help
if let Some(parent) = diag_info.matching_block_spans.last()
&& diag_info.open_braces.last().is_none()
&& diag_info.open_delimiters.last().is_none()
&& diag_info.empty_block_spans.iter().all(|&sp| sp != parent.0.to(parent.1))
{
err.span_label(parent.0, "this opening brace...");
err.span_label(parent.1, "...matches this closing brace");
}
}
}

pub(crate) fn make_unclosed_delims_error(
unmatched: UnmatchedDelim,
psess: &ParseSess,
) -> Option<Diag<'_>> {
// `None` here means an `Eof` was found. We already emit those errors elsewhere, we add them to
// `unmatched_delims` only for error recovery in the `Parser`.
let found_delim = unmatched.found_delim?;
let mut spans = vec![unmatched.found_span];
if let Some(sp) = unmatched.unclosed_span {
spans.push(sp);
};
let err = psess.dcx().create_err(MismatchedClosingDelimiter {
spans,
delimiter: pprust::token_kind_to_string(&found_delim.as_close_token_kind()).to_string(),
unmatched: unmatched.found_span,
opening_candidate: unmatched.candidate_span,
unclosed: unmatched.unclosed_span,
});
Some(err)
}
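
The moved helper keeps its signature; below is a rough sketch of how a caller could drain `unmatched_delims` through it. The wrapper function, its name, and its parameters are assumptions for illustration, not code from this PR.

    // Hypothetical caller: the function name and parameter types are illustrative only.
    fn emit_unclosed_delim_errors(unmatched_delims: Vec<UnmatchedDelim>, psess: &ParseSess) {
        for unmatched in unmatched_delims {
            // `make_unclosed_delims_error` returns `None` for EOF entries, which are
            // reported elsewhere and kept only for error recovery in the parser.
            if let Some(err) = make_unclosed_delims_error(unmatched, psess) {
                err.emit();
            }
        }
    }
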
3 changes: 2 additions & 1 deletion compiler/rustc_parse/src/lexer/mod.rs
@@ -1,5 +1,6 @@
use std::ops::Range;

use diagnostics::make_unclosed_delims_error;
use rustc_ast::ast::{self, AttrStyle};
use rustc_ast::token::{self, CommentKind, Delimiter, IdentIsRaw, Token, TokenKind};
use rustc_ast::tokenstream::TokenStream;
@@ -17,9 +18,9 @@ use rustc_session::parse::ParseSess;
use rustc_span::{BytePos, Pos, Span, Symbol};
use tracing::debug;

use crate::errors;
use crate::lexer::diagnostics::TokenTreeDiagInfo;
use crate::lexer::unicode_chars::UNICODE_ARRAY;
use crate::{errors, make_unclosed_delims_error};

mod diagnostics;
mod tokentrees;
45 changes: 24 additions & 21 deletions compiler/rustc_parse/src/lexer/tokentrees.rs
@@ -54,8 +54,8 @@ impl<'psess, 'src> Lexer<'psess, 'src> {
let mut err = self.dcx().struct_span_err(self.token.span, msg);

let unclosed_delimiter_show_limit = 5;
let len = usize::min(unclosed_delimiter_show_limit, self.diag_info.open_braces.len());
for &(_, span) in &self.diag_info.open_braces[..len] {
let len = usize::min(unclosed_delimiter_show_limit, self.diag_info.open_delimiters.len());
for &(_, span) in &self.diag_info.open_delimiters[..len] {
err.span_label(span, "unclosed delimiter");
self.diag_info.unmatched_delims.push(UnmatchedDelim {
found_delim: None,
@@ -65,19 +65,19 @@ impl<'psess, 'src> Lexer<'psess, 'src> {
});
}

if let Some((_, span)) = self.diag_info.open_braces.get(unclosed_delimiter_show_limit)
&& self.diag_info.open_braces.len() >= unclosed_delimiter_show_limit + 2
if let Some((_, span)) = self.diag_info.open_delimiters.get(unclosed_delimiter_show_limit)
&& self.diag_info.open_delimiters.len() >= unclosed_delimiter_show_limit + 2
{
err.span_label(
*span,
format!(
"another {} unclosed delimiters begin from here",
self.diag_info.open_braces.len() - unclosed_delimiter_show_limit
self.diag_info.open_delimiters.len() - unclosed_delimiter_show_limit
),
);
}

if let Some((delim, _)) = self.diag_info.open_braces.last() {
if let Some((delim, _)) = self.diag_info.open_delimiters.last() {
report_suspicious_mismatch_block(
&mut err,
&self.diag_info,
@@ -95,7 +95,7 @@ impl<'psess, 'src> Lexer<'psess, 'src> {
// The span for beginning of the delimited section.
let pre_span = self.token.span;

self.diag_info.open_braces.push((open_delim, self.token.span));
self.diag_info.open_delimiters.push((open_delim, self.token.span));

// Lex the token trees within the delimiters.
// We stop at any delimiter so we can try to recover if the user
@@ -109,11 +109,12 @@ impl<'psess, 'src> Lexer<'psess, 'src> {
let close_spacing = if let Some(close_delim) = self.token.kind.close_delim() {
if close_delim == open_delim {
// Correct delimiter.
let (open_brace, open_brace_span) = self.diag_info.open_braces.pop().unwrap();
let close_brace_span = self.token.span;
let (open_delimiter, open_delimiter_span) =
self.diag_info.open_delimiters.pop().unwrap();
let close_delimiter_span = self.token.span;

if tts.is_empty() && close_delim == Delimiter::Brace {
let empty_block_span = open_brace_span.to(close_brace_span);
let empty_block_span = open_delimiter_span.to(close_delimiter_span);
if !sm.is_multiline(empty_block_span) {
// Only track if the block is in the form of `{}`, otherwise it is
// likely that it was written on purpose.
@@ -122,9 +123,11 @@ impl<'psess, 'src> Lexer<'psess, 'src> {
}

// only add braces
if let (Delimiter::Brace, Delimiter::Brace) = (open_brace, open_delim) {
if let (Delimiter::Brace, Delimiter::Brace) = (open_delimiter, open_delim) {
// Add all the matching spans, we will sort by span later
self.diag_info.matching_block_spans.push((open_brace_span, close_brace_span));
self.diag_info
.matching_block_spans
.push((open_delimiter_span, close_delimiter_span));
}

// Move past the closing delimiter.
@@ -140,26 +143,26 @@ impl<'psess, 'src> Lexer<'psess, 'src> {
// This is a conservative error: only report the last unclosed
// delimiter. The previous unclosed delimiters could actually be
// closed! The lexer just hasn't gotten to them yet.
if let Some(&(_, sp)) = self.diag_info.open_braces.last() {
if let Some(&(_, sp)) = self.diag_info.open_delimiters.last() {
unclosed_delimiter = Some(sp);
};
for (brace, brace_span) in &self.diag_info.open_braces {
if same_indentation_level(sm, self.token.span, *brace_span)
&& brace == &close_delim
for (delimiter, delimiter_span) in &self.diag_info.open_delimiters {
if same_indentation_level(sm, self.token.span, *delimiter_span)
&& delimiter == &close_delim
{
// high likelihood of these two corresponding
candidate = Some(*brace_span);
candidate = Some(*delimiter_span);
}
}
let (_, _) = self.diag_info.open_braces.pop().unwrap();
let (_, _) = self.diag_info.open_delimiters.pop().unwrap();
self.diag_info.unmatched_delims.push(UnmatchedDelim {
found_delim: Some(close_delim),
found_span: self.token.span,
unclosed_span: unclosed_delimiter,
candidate_span: candidate,
});
} else {
self.diag_info.open_braces.pop();
self.diag_info.open_delimiters.pop();
}

// If the incorrect delimiter matches an earlier opening
@@ -169,7 +172,7 @@ impl<'psess, 'src> Lexer<'psess, 'src> {
// fn foo() {
// bar(baz(
// } // Incorrect delimiter but matches the earlier `{`
if !self.diag_info.open_braces.iter().any(|&(b, _)| b == close_delim) {
if !self.diag_info.open_delimiters.iter().any(|&(d, _)| d == close_delim) {
self.bump_minimal()
} else {
// The choice of value here doesn't matter.
@@ -180,7 +183,7 @@ impl<'psess, 'src> Lexer<'psess, 'src> {
assert_eq!(self.token.kind, token::Eof);
// Silently recover, the EOF token will be seen again
// and an error emitted then. Thus we don't pop from
// self.open_braces here. The choice of spacing value here
// self.open_delimiters here. The choice of spacing value here
// doesn't matter.
Spacing::Alone
};
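
To make the EOF branch above concrete: `unclosed_delimiter_show_limit` caps the per-span labels at five, and the summary label is added only when at least two more delimiters remain open. A deliberately malformed sketch (exact wording comes from the `format!` call in the diff):

    // Seven unclosed `{` at end of file: the first five spans each get an
    // "unclosed delimiter" label, and the sixth is labeled
    // "another 2 unclosed delimiters begin from here" (7 total minus the 5 shown).
    fn main() { { { { { { {
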
2 changes: 1 addition & 1 deletion compiler/rustc_parse/src/lib.rs
@@ -32,7 +32,7 @@ pub const MACRO_ARGUMENTS: Option<&str> = Some("macro arguments");

#[macro_use]
pub mod parser;
use parser::{Parser, make_unclosed_delims_error};
use parser::Parser;
pub mod lexer;
pub mod validate_attr;

26 changes: 1 addition & 25 deletions compiler/rustc_parse/src/parser/mod.rs
@@ -43,11 +43,8 @@ use token_type::TokenTypeSet;
pub use token_type::{ExpKeywordPair, ExpTokenPair, TokenType};
use tracing::debug;

use crate::errors::{
self, IncorrectVisibilityRestriction, MismatchedClosingDelimiter, NonStringAbiLiteral,
};
use crate::errors::{self, IncorrectVisibilityRestriction, NonStringAbiLiteral};
use crate::exp;
use crate::lexer::UnmatchedDelim;

#[cfg(test)]
mod tests;
@@ -1745,27 +1742,6 @@ impl<'a> Parser<'a> {
}
}

pub(crate) fn make_unclosed_delims_error(
unmatched: UnmatchedDelim,
psess: &ParseSess,
) -> Option<Diag<'_>> {
// `None` here means an `Eof` was found. We already emit those errors elsewhere, we add them to
// `unmatched_delims` only for error recovery in the `Parser`.
let found_delim = unmatched.found_delim?;
let mut spans = vec![unmatched.found_span];
if let Some(sp) = unmatched.unclosed_span {
spans.push(sp);
};
let err = psess.dcx().create_err(MismatchedClosingDelimiter {
spans,
delimiter: pprust::token_kind_to_string(&found_delim.as_close_token_kind()).to_string(),
unmatched: unmatched.found_span,
opening_candidate: unmatched.candidate_span,
unclosed: unmatched.unclosed_span,
});
Some(err)
}

/// A helper struct used when building an `AttrTokenStream` from
/// a `LazyAttrTokenStream`. Both delimiter and non-delimited tokens
/// are stored as `FlatToken::Token`. A vector of `FlatToken`s