Skip to content

Commit ca1bcfd

Browse files
committed
Auto merge of #61541 - petrochenkov:tsp, r=oli-obk
syntax: Keep token span as a part of `Token` In a world with proc macros and edition hygiene, a `Token` without a span is not self-contained. In practice this means that tokens and spans are always stored and passed somewhere along with each other. This PR combines them into a single struct by doing the next renaming/replacement: - `Token` -> `TokenKind` - `TokenAndSpan` -> `Token` - `(Token, Span)` -> `Token` Some later commits (fb6e2fe and 1cdee86) remove duplicate spans in `token::Ident` and `token::Lifetime`. Those spans were supposed to be identical to token spans, but could easily go out of sync, as was noticed in #60965 (comment). The `(Token, Span)` -> `Token` change is a soft prerequisite for this de-duplication since it makes it possible to avoid some larger churn (passing spans to most of the functions classifying identifiers).
2 parents c5295ac + 3a31f06 commit ca1bcfd

Some content is hidden

Large commits have some content hidden by default. Use the search box below to find content that may be hidden.

43 files changed

+978
-959
lines changed

src/doc/unstable-book/src/language-features/plugin.md

+3-3
Original file line numberDiff line numberDiff line change
@@ -56,15 +56,15 @@ extern crate syntax_pos;
5656
extern crate rustc;
5757
extern crate rustc_plugin;
5858
59-
use syntax::parse::token;
59+
use syntax::parse::token::{self, Token};
6060
use syntax::tokenstream::TokenTree;
6161
use syntax::ext::base::{ExtCtxt, MacResult, DummyResult, MacEager};
6262
use syntax::ext::build::AstBuilder; // A trait for expr_usize.
6363
use syntax_pos::Span;
6464
use rustc_plugin::Registry;
6565
6666
fn expand_rn(cx: &mut ExtCtxt, sp: Span, args: &[TokenTree])
67-
-> Box<MacResult + 'static> {
67+
-> Box<dyn MacResult + 'static> {
6868
6969
static NUMERALS: &'static [(&'static str, usize)] = &[
7070
("M", 1000), ("CM", 900), ("D", 500), ("CD", 400),
@@ -80,7 +80,7 @@ fn expand_rn(cx: &mut ExtCtxt, sp: Span, args: &[TokenTree])
8080
}
8181
8282
let text = match args[0] {
83-
TokenTree::Token(_, token::Ident(s)) => s.to_string(),
83+
TokenTree::Token(Token { kind: token::Ident(s, _), .. }) => s.to_string(),
8484
_ => {
8585
cx.span_err(sp, "argument should be a single identifier");
8686
return DummyResult::any(sp);

src/librustc/hir/lowering.rs

+7-7
Original file line numberDiff line numberDiff line change
@@ -67,7 +67,7 @@ use syntax::source_map::CompilerDesugaringKind::IfTemporary;
6767
use syntax::std_inject;
6868
use syntax::symbol::{kw, sym, Symbol};
6969
use syntax::tokenstream::{TokenStream, TokenTree};
70-
use syntax::parse::token::Token;
70+
use syntax::parse::token::{self, Token};
7171
use syntax::visit::{self, Visitor};
7272
use syntax_pos::{DUMMY_SP, edition, Span};
7373

@@ -1328,7 +1328,7 @@ impl<'a> LoweringContext<'a> {
13281328

13291329
fn lower_token_tree(&mut self, tree: TokenTree) -> TokenStream {
13301330
match tree {
1331-
TokenTree::Token(span, token) => self.lower_token(token, span),
1331+
TokenTree::Token(token) => self.lower_token(token),
13321332
TokenTree::Delimited(span, delim, tts) => TokenTree::Delimited(
13331333
span,
13341334
delim,
@@ -1337,13 +1337,13 @@ impl<'a> LoweringContext<'a> {
13371337
}
13381338
}
13391339

1340-
fn lower_token(&mut self, token: Token, span: Span) -> TokenStream {
1341-
match token {
1342-
Token::Interpolated(nt) => {
1343-
let tts = nt.to_tokenstream(&self.sess.parse_sess, span);
1340+
fn lower_token(&mut self, token: Token) -> TokenStream {
1341+
match token.kind {
1342+
token::Interpolated(nt) => {
1343+
let tts = nt.to_tokenstream(&self.sess.parse_sess, token.span);
13441344
self.lower_token_stream(tts)
13451345
}
1346-
other => TokenTree::Token(span, other).into(),
1346+
_ => TokenTree::Token(token).into(),
13471347
}
13481348
}
13491349

src/librustc/hir/map/def_collector.rs

+1-1
Original file line numberDiff line numberDiff line change
@@ -326,7 +326,7 @@ impl<'a> visit::Visitor<'a> for DefCollector<'a> {
326326
}
327327

328328
fn visit_token(&mut self, t: Token) {
329-
if let Token::Interpolated(nt) = t {
329+
if let token::Interpolated(nt) = t.kind {
330330
if let token::NtExpr(ref expr) = *nt {
331331
if let ExprKind::Mac(..) = expr.node {
332332
self.visit_macro_invoc(expr.id);

src/librustc/ich/impls_syntax.rs

+64-60
Original file line numberDiff line numberDiff line change
@@ -261,9 +261,8 @@ for tokenstream::TokenTree {
261261
hasher: &mut StableHasher<W>) {
262262
mem::discriminant(self).hash_stable(hcx, hasher);
263263
match *self {
264-
tokenstream::TokenTree::Token(span, ref token) => {
265-
span.hash_stable(hcx, hasher);
266-
hash_token(token, hcx, hasher);
264+
tokenstream::TokenTree::Token(ref token) => {
265+
token.hash_stable(hcx, hasher);
267266
}
268267
tokenstream::TokenTree::Delimited(span, delim, ref tts) => {
269268
span.hash_stable(hcx, hasher);
@@ -306,70 +305,75 @@ impl_stable_hash_for!(struct token::Lit {
306305
suffix
307306
});
308307

309-
fn hash_token<'a, 'gcx, W: StableHasherResult>(
310-
token: &token::Token,
311-
hcx: &mut StableHashingContext<'a>,
312-
hasher: &mut StableHasher<W>,
313-
) {
314-
mem::discriminant(token).hash_stable(hcx, hasher);
315-
match *token {
316-
token::Token::Eq |
317-
token::Token::Lt |
318-
token::Token::Le |
319-
token::Token::EqEq |
320-
token::Token::Ne |
321-
token::Token::Ge |
322-
token::Token::Gt |
323-
token::Token::AndAnd |
324-
token::Token::OrOr |
325-
token::Token::Not |
326-
token::Token::Tilde |
327-
token::Token::At |
328-
token::Token::Dot |
329-
token::Token::DotDot |
330-
token::Token::DotDotDot |
331-
token::Token::DotDotEq |
332-
token::Token::Comma |
333-
token::Token::Semi |
334-
token::Token::Colon |
335-
token::Token::ModSep |
336-
token::Token::RArrow |
337-
token::Token::LArrow |
338-
token::Token::FatArrow |
339-
token::Token::Pound |
340-
token::Token::Dollar |
341-
token::Token::Question |
342-
token::Token::SingleQuote |
343-
token::Token::Whitespace |
344-
token::Token::Comment |
345-
token::Token::Eof => {}
346-
347-
token::Token::BinOp(bin_op_token) |
348-
token::Token::BinOpEq(bin_op_token) => {
349-
std_hash::Hash::hash(&bin_op_token, hasher);
350-
}
308+
impl<'a> HashStable<StableHashingContext<'a>> for token::TokenKind {
309+
fn hash_stable<W: StableHasherResult>(&self,
310+
hcx: &mut StableHashingContext<'a>,
311+
hasher: &mut StableHasher<W>) {
312+
mem::discriminant(self).hash_stable(hcx, hasher);
313+
match *self {
314+
token::Eq |
315+
token::Lt |
316+
token::Le |
317+
token::EqEq |
318+
token::Ne |
319+
token::Ge |
320+
token::Gt |
321+
token::AndAnd |
322+
token::OrOr |
323+
token::Not |
324+
token::Tilde |
325+
token::At |
326+
token::Dot |
327+
token::DotDot |
328+
token::DotDotDot |
329+
token::DotDotEq |
330+
token::Comma |
331+
token::Semi |
332+
token::Colon |
333+
token::ModSep |
334+
token::RArrow |
335+
token::LArrow |
336+
token::FatArrow |
337+
token::Pound |
338+
token::Dollar |
339+
token::Question |
340+
token::SingleQuote |
341+
token::Whitespace |
342+
token::Comment |
343+
token::Eof => {}
344+
345+
token::BinOp(bin_op_token) |
346+
token::BinOpEq(bin_op_token) => {
347+
std_hash::Hash::hash(&bin_op_token, hasher);
348+
}
351349

352-
token::Token::OpenDelim(delim_token) |
353-
token::Token::CloseDelim(delim_token) => {
354-
std_hash::Hash::hash(&delim_token, hasher);
355-
}
356-
token::Token::Literal(lit) => lit.hash_stable(hcx, hasher),
350+
token::OpenDelim(delim_token) |
351+
token::CloseDelim(delim_token) => {
352+
std_hash::Hash::hash(&delim_token, hasher);
353+
}
354+
token::Literal(lit) => lit.hash_stable(hcx, hasher),
357355

358-
token::Token::Ident(ident, is_raw) => {
359-
ident.name.hash_stable(hcx, hasher);
360-
is_raw.hash_stable(hcx, hasher);
361-
}
362-
token::Token::Lifetime(ident) => ident.name.hash_stable(hcx, hasher),
356+
token::Ident(name, is_raw) => {
357+
name.hash_stable(hcx, hasher);
358+
is_raw.hash_stable(hcx, hasher);
359+
}
360+
token::Lifetime(name) => name.hash_stable(hcx, hasher),
363361

364-
token::Token::Interpolated(_) => {
365-
bug!("interpolated tokens should not be present in the HIR")
366-
}
362+
token::Interpolated(_) => {
363+
bug!("interpolated tokens should not be present in the HIR")
364+
}
367365

368-
token::Token::DocComment(val) |
369-
token::Token::Shebang(val) => val.hash_stable(hcx, hasher),
366+
token::DocComment(val) |
367+
token::Shebang(val) => val.hash_stable(hcx, hasher),
368+
}
370369
}
371370
}
372371

372+
impl_stable_hash_for!(struct token::Token {
373+
kind,
374+
span
375+
});
376+
373377
impl_stable_hash_for!(enum ::syntax::ast::NestedMetaItem {
374378
MetaItem(meta_item),
375379
Literal(lit)

src/librustc_lint/builtin.rs

+3-9
Original file line numberDiff line numberDiff line change
@@ -1414,15 +1414,9 @@ impl KeywordIdents {
14141414
fn check_tokens(&mut self, cx: &EarlyContext<'_>, tokens: TokenStream) {
14151415
for tt in tokens.into_trees() {
14161416
match tt {
1417-
TokenTree::Token(span, tok) => match tok.ident() {
1418-
// only report non-raw idents
1419-
Some((ident, false)) => {
1420-
self.check_ident_token(cx, UnderMacro(true), ast::Ident {
1421-
span: span.substitute_dummy(ident.span),
1422-
..ident
1423-
});
1424-
}
1425-
_ => {},
1417+
// Only report non-raw idents.
1418+
TokenTree::Token(token) => if let Some((ident, false)) = token.ident() {
1419+
self.check_ident_token(cx, UnderMacro(true), ident);
14261420
}
14271421
TokenTree::Delimited(_, _, tts) => {
14281422
self.check_tokens(cx, tts)

src/librustc_resolve/build_reduced_graph.rs

+1-1
Original file line numberDiff line numberDiff line change
@@ -1053,7 +1053,7 @@ impl<'a, 'b> Visitor<'a> for BuildReducedGraphVisitor<'a, 'b> {
10531053
}
10541054

10551055
fn visit_token(&mut self, t: Token) {
1056-
if let Token::Interpolated(nt) = t {
1056+
if let token::Interpolated(nt) = t.kind {
10571057
if let token::NtExpr(ref expr) = *nt {
10581058
if let ast::ExprKind::Mac(..) = expr.node {
10591059
self.visit_invoc(expr.id);

src/librustc_save_analysis/span_utils.rs

+11-11
Original file line numberDiff line numberDiff line change
@@ -5,7 +5,7 @@ use crate::generated_code;
55
use std::cell::Cell;
66

77
use syntax::parse::lexer::{self, StringReader};
8-
use syntax::parse::token::{self, Token};
8+
use syntax::parse::token::{self, TokenKind};
99
use syntax_pos::*;
1010

1111
#[derive(Clone)]
@@ -56,15 +56,15 @@ impl<'a> SpanUtils<'a> {
5656
lexer::StringReader::retokenize(&self.sess.parse_sess, span)
5757
}
5858

59-
pub fn sub_span_of_token(&self, span: Span, tok: Token) -> Option<Span> {
59+
pub fn sub_span_of_token(&self, span: Span, tok: TokenKind) -> Option<Span> {
6060
let mut toks = self.retokenise_span(span);
6161
loop {
6262
let next = toks.real_token();
63-
if next.tok == token::Eof {
63+
if next == token::Eof {
6464
return None;
6565
}
66-
if next.tok == tok {
67-
return Some(next.sp);
66+
if next == tok {
67+
return Some(next.span);
6868
}
6969
}
7070
}
@@ -74,12 +74,12 @@ impl<'a> SpanUtils<'a> {
7474
// let mut toks = self.retokenise_span(span);
7575
// loop {
7676
// let ts = toks.real_token();
77-
// if ts.tok == token::Eof {
77+
// if ts == token::Eof {
7878
// return None;
7979
// }
80-
// if ts.tok == token::Not {
80+
// if ts == token::Not {
8181
// let ts = toks.real_token();
82-
// if ts.tok.is_ident() {
82+
// if ts.kind.is_ident() {
8383
// return Some(ts.sp);
8484
// } else {
8585
// return None;
@@ -93,12 +93,12 @@ impl<'a> SpanUtils<'a> {
9393
// let mut toks = self.retokenise_span(span);
9494
// let mut prev = toks.real_token();
9595
// loop {
96-
// if prev.tok == token::Eof {
96+
// if prev == token::Eof {
9797
// return None;
9898
// }
9999
// let ts = toks.real_token();
100-
// if ts.tok == token::Not {
101-
// if prev.tok.is_ident() {
100+
// if ts == token::Not {
101+
// if prev.kind.is_ident() {
102102
// return Some(prev.sp);
103103
// } else {
104104
// return None;

0 commit comments

Comments
 (0)