Auto merge of rust-lang#17601 - Veykril:proc-macro-fix, r=Veykril
Fix incorrect encoding of literals in the proc-macro-api on version 4

Quick follow-up on rust-lang/rust-analyzer#17559 breaking things
bors committed Jul 15, 2024
2 parents c5fc669 + 311aaa5 commit a2c46b9
Showing 12 changed files with 183 additions and 134 deletions.
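At a glance, the visible part of the diff moves the literal re-parsing helper `token_to_literal` out of `mbe::syntax_bridge` and into the `tt` crate (the `ra-ap-rustc_lexer` dependency moves with it), points `hir-expand` and `mbe` at the new location, adds an `mbe` dependency to `proc-macro-api` (per the Cargo.toml comment, only to parse via `token_to_literal`), and makes the proc-macro RPC round-trip test run against every protocol version from `RUST_ANALYZER_SPAN_SUPPORT` through `CURRENT_API_VERSION`. The snippet below is not part of the commit; it is a minimal standalone sketch of what the helper computes for a plain string literal. The real implementation, visible in the removed `syntax_bridge.rs` lines further down, uses `rustc_lexer` and covers every literal kind.

```rust
// Standalone sketch, not from the commit: split a string literal's source
// text into (kind, unquoted body, optional suffix), the shape that
// `token_to_literal` produces. Escapes, raw strings, and the other literal
// kinds are ignored here for brevity.
#[derive(Debug, PartialEq)]
enum LitKind {
    Str,
    Err,
}

fn split_str_literal(text: &str) -> (LitKind, &str, Option<&str>) {
    let Some(rest) = text.strip_prefix('"') else {
        return (LitKind::Err, text, None);
    };
    let Some(end) = rest.find('"') else {
        return (LitKind::Err, text, None);
    };
    let suffix = &rest[end + 1..];
    (LitKind::Str, &rest[..end], (!suffix.is_empty()).then_some(suffix))
}

fn main() {
    // `"Foo"` travels as kind `Str` with the bare body `Foo`, which matches
    // the fixture change in msg.rs below.
    assert_eq!(split_str_literal(r#""Foo""#), (LitKind::Str, "Foo", None));
    assert_eq!(split_str_literal(r#""x"abc"#), (LitKind::Str, "x", Some("abc")));
}
```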
src/tools/rust-analyzer/Cargo.lock (3 changes: 2 additions & 1 deletion)
@@ -1046,7 +1046,6 @@ dependencies = [
"arrayvec",
"cov-mark",
"parser",
"ra-ap-rustc_lexer",
"rustc-hash",
"smallvec",
"span",
@@ -1326,6 +1325,7 @@ dependencies = [
"base-db",
"indexmap",
"la-arena 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
"mbe",
"paths",
"rustc-hash",
"serde",
@@ -2218,6 +2218,7 @@ name = "tt"
version = "0.0.0"
dependencies = [
"arrayvec",
"ra-ap-rustc_lexer",
"smol_str",
"stdx",
"text-size",
src/tools/rust-analyzer/crates/hir-expand/src/attrs.rs (7 changes: 4 additions & 3 deletions)
@@ -5,9 +5,10 @@ use base_db::CrateId;
use cfg::CfgExpr;
use either::Either;
use intern::{sym, Interned};

use mbe::{
desugar_doc_comment_text, syntax_node_to_token_tree, token_to_literal, DelimiterKind,
DocCommentDesugarMode, Punct,
desugar_doc_comment_text, syntax_node_to_token_tree, DelimiterKind, DocCommentDesugarMode,
Punct,
};
use smallvec::{smallvec, SmallVec};
use span::{Span, SyntaxContextId};
@@ -20,7 +21,7 @@ use crate::{
db::ExpandDatabase,
mod_path::ModPath,
span_map::SpanMapRef,
tt::{self, Subtree},
tt::{self, token_to_literal, Subtree},
InFile,
};

src/tools/rust-analyzer/crates/hir-expand/src/lib.rs (2 changes: 1 addition & 1 deletion)
@@ -59,7 +59,7 @@ pub use span::{HirFileId, MacroCallId, MacroFileId};

pub mod tt {
pub use span::Span;
pub use tt::{DelimiterKind, IdentIsRaw, LitKind, Spacing};
pub use tt::{token_to_literal, DelimiterKind, IdentIsRaw, LitKind, Spacing};

pub type Delimiter = ::tt::Delimiter<Span>;
pub type DelimSpan = ::tt::DelimSpan<Span>;
src/tools/rust-analyzer/crates/mbe/Cargo.toml (3 changes: 1 addition & 2 deletions)
@@ -17,7 +17,6 @@ rustc-hash.workspace = true
smallvec.workspace = true
tracing.workspace = true
arrayvec.workspace = true
ra-ap-rustc_lexer.workspace = true

# local deps
syntax.workspace = true
@@ -30,7 +29,7 @@ span.workspace = true
test-utils.workspace = true

[features]
in-rust-tree = ["parser/in-rust-tree", "syntax/in-rust-tree"]
in-rust-tree = ["parser/in-rust-tree", "tt/in-rust-tree", "syntax/in-rust-tree"]

[lints]
workspace = true
src/tools/rust-analyzer/crates/mbe/src/lib.rs (9 changes: 1 addition & 8 deletions)
@@ -6,13 +6,6 @@
//! The tests for this functionality live in another crate:
//! `hir_def::macro_expansion_tests::mbe`.

#![cfg_attr(feature = "in-rust-tree", feature(rustc_private))]

#[cfg(not(feature = "in-rust-tree"))]
extern crate ra_ap_rustc_lexer as rustc_lexer;
#[cfg(feature = "in-rust-tree")]
extern crate rustc_lexer;

mod expander;
mod parser;
mod syntax_bridge;
@@ -36,7 +29,7 @@ pub use tt::{Delimiter, DelimiterKind, Punct};
pub use crate::syntax_bridge::{
desugar_doc_comment_text, parse_exprs_with_sep, parse_to_token_tree,
parse_to_token_tree_static_span, syntax_node_to_token_tree, syntax_node_to_token_tree_modified,
token_to_literal, token_tree_to_syntax_node, DocCommentDesugarMode, SpanMapper,
token_tree_to_syntax_node, DocCommentDesugarMode, SpanMapper,
};

pub use crate::syntax_bridge::dummy_test_span_utils::*;
src/tools/rust-analyzer/crates/mbe/src/syntax_bridge.rs (53 changes: 2 additions & 51 deletions)
@@ -4,7 +4,7 @@ use std::fmt;

use rustc_hash::{FxHashMap, FxHashSet};
use span::{Edition, SpanAnchor, SpanData, SpanMap};
use stdx::{format_to, itertools::Itertools, never, non_empty_vec::NonEmptyVec};
use stdx::{format_to, never, non_empty_vec::NonEmptyVec};
use syntax::{
ast::{self, make::tokens::doc_comment},
format_smolstr, AstToken, Parse, PreorderWithTokens, SmolStr, SyntaxElement,
@@ -14,6 +14,7 @@ use syntax::{
use tt::{
buffer::{Cursor, TokenBuffer},
iter::TtIter,
token_to_literal,
};

use crate::to_parser_input::to_parser_input;
@@ -400,56 +401,6 @@
}
}

pub fn token_to_literal<S>(text: SmolStr, span: S) -> tt::Literal<S>
where
S: Copy,
{
use rustc_lexer::LiteralKind;

let token = rustc_lexer::tokenize(&text).next_tuple();
let Some((rustc_lexer::Token {
kind: rustc_lexer::TokenKind::Literal { kind, suffix_start },
..
},)) = token
else {
return tt::Literal { span, text, kind: tt::LitKind::Err(()), suffix: None };
};

let (kind, start_offset, end_offset) = match kind {
LiteralKind::Int { .. } => (tt::LitKind::Integer, 0, 0),
LiteralKind::Float { .. } => (tt::LitKind::Float, 0, 0),
LiteralKind::Char { terminated } => (tt::LitKind::Char, 1, terminated as usize),
LiteralKind::Byte { terminated } => (tt::LitKind::Byte, 2, terminated as usize),
LiteralKind::Str { terminated } => (tt::LitKind::Str, 1, terminated as usize),
LiteralKind::ByteStr { terminated } => (tt::LitKind::ByteStr, 2, terminated as usize),
LiteralKind::CStr { terminated } => (tt::LitKind::CStr, 2, terminated as usize),
LiteralKind::RawStr { n_hashes } => (
tt::LitKind::StrRaw(n_hashes.unwrap_or_default()),
2 + n_hashes.unwrap_or_default() as usize,
1 + n_hashes.unwrap_or_default() as usize,
),
LiteralKind::RawByteStr { n_hashes } => (
tt::LitKind::ByteStrRaw(n_hashes.unwrap_or_default()),
3 + n_hashes.unwrap_or_default() as usize,
1 + n_hashes.unwrap_or_default() as usize,
),
LiteralKind::RawCStr { n_hashes } => (
tt::LitKind::CStrRaw(n_hashes.unwrap_or_default()),
3 + n_hashes.unwrap_or_default() as usize,
1 + n_hashes.unwrap_or_default() as usize,
),
};

let (lit, suffix) = text.split_at(suffix_start as usize);
let lit = &lit[start_offset..lit.len() - end_offset];
let suffix = match suffix {
"" | "_" => None,
suffix => Some(Box::new(suffix.into())),
};

tt::Literal { span, text: lit.into(), kind, suffix }
}

fn is_single_token_op(kind: SyntaxKind) -> bool {
matches!(
kind,
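As a quick check of the offset arithmetic in the helper removed above (and, per the rest of the diff, relocated into `tt`): for a raw string with one hash, the body begins after `r#"` (2 + n_hashes bytes) and ends before `"#` (1 + n_hashes bytes). A tiny standalone example of that slice, not taken from the commit:

```rust
fn main() {
    // LiteralKind::RawStr { n_hashes: Some(1) } maps to
    // start_offset = 2 + n_hashes and end_offset = 1 + n_hashes.
    let text = r##"r#"abc"#"##; // the eight-byte source text r#"abc"#
    let n_hashes = 1usize;
    let (start_offset, end_offset) = (2 + n_hashes, 1 + n_hashes);
    let body = &text[start_offset..text.len() - end_offset];
    assert_eq!(body, "abc"); // kind LitKind::StrRaw(1), no suffix
}
```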
src/tools/rust-analyzer/crates/proc-macro-api/Cargo.toml (2 changes: 2 additions & 0 deletions)
@@ -28,6 +28,8 @@ span.workspace = true
# InternIds for the syntax context
base-db.workspace = true
la-arena.workspace = true
# only here to parse via token_to_literal
mbe.workspace = true

[lints]
workspace = true
src/tools/rust-analyzer/crates/proc-macro-api/src/msg.rs (53 changes: 28 additions & 25 deletions)
@@ -197,7 +197,7 @@ mod tests {
.into(),
),
TokenTree::Leaf(Leaf::Literal(Literal {
text: "\"Foo\"".into(),
text: "Foo".into(),
span: Span {
range: TextRange::at(TextSize::new(10), TextSize::of("\"Foo\"")),
anchor,
@@ -263,32 +263,35 @@
#[test]
fn test_proc_macro_rpc_works() {
let tt = fixture_token_tree();
let mut span_data_table = Default::default();
let task = ExpandMacro {
data: ExpandMacroData {
macro_body: FlatTree::new(&tt, CURRENT_API_VERSION, &mut span_data_table),
macro_name: Default::default(),
attributes: None,
has_global_spans: ExpnGlobals {
serialize: true,
def_site: 0,
call_site: 0,
mixed_site: 0,
for v in RUST_ANALYZER_SPAN_SUPPORT..=CURRENT_API_VERSION {
let mut span_data_table = Default::default();
let task = ExpandMacro {
data: ExpandMacroData {
macro_body: FlatTree::new(&tt, v, &mut span_data_table),
macro_name: Default::default(),
attributes: None,
has_global_spans: ExpnGlobals {
serialize: true,
def_site: 0,
call_site: 0,
mixed_site: 0,
},
span_data_table: Vec::new(),
},
span_data_table: Vec::new(),
},
lib: Utf8PathBuf::from_path_buf(std::env::current_dir().unwrap()).unwrap(),
env: Default::default(),
current_dir: Default::default(),
};
lib: Utf8PathBuf::from_path_buf(std::env::current_dir().unwrap()).unwrap(),
env: Default::default(),
current_dir: Default::default(),
};

let json = serde_json::to_string(&task).unwrap();
// println!("{}", json);
let back: ExpandMacro = serde_json::from_str(&json).unwrap();
let json = serde_json::to_string(&task).unwrap();
// println!("{}", json);
let back: ExpandMacro = serde_json::from_str(&json).unwrap();

assert_eq!(
tt,
back.data.macro_body.to_subtree_resolved(CURRENT_API_VERSION, &span_data_table)
);
assert_eq!(
tt,
back.data.macro_body.to_subtree_resolved(v, &span_data_table),
"version: {v}"
);
}
}
}
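With the new loop in place, the round-trip test above serializes and deserializes the fixture token tree once per supported protocol version, from `RUST_ANALYZER_SPAN_SUPPORT` through `CURRENT_API_VERSION`, and the `"version: {v}"` message in the assertion identifies which encoding breaks, instead of only exercising the newest format.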