From ec3f35bf63c80bc37952b4f1385b81bccea71900 Mon Sep 17 00:00:00 2001 From: Mine Starks Date: Tue, 14 Nov 2023 14:39:17 -0800 Subject: [PATCH 01/80] Show placeholder while run command gets runnables from server --- editors/code/src/run.ts | 151 +++++++++++++++++++++++++--------------- 1 file changed, 94 insertions(+), 57 deletions(-) diff --git a/editors/code/src/run.ts b/editors/code/src/run.ts index 57881803a6a04..778cbc5762aeb 100644 --- a/editors/code/src/run.ts +++ b/editors/code/src/run.ts @@ -7,6 +7,8 @@ import type { CtxInit } from "./ctx"; import { makeDebugConfig } from "./debug"; import type { Config, RunnableEnvCfg, RunnableEnvCfgItem } from "./config"; import { unwrapUndefinable } from "./undefinable"; +import type { LanguageClient } from "vscode-languageclient/node"; +import type { RustEditor } from "./util"; const quickPickButtons = [ { iconPath: new vscode.ThemeIcon("save"), tooltip: "Save as a launch.json configuration." }, @@ -21,73 +23,36 @@ export async function selectRunnable( const editor = ctx.activeRustEditor; if (!editor) return; - const client = ctx.client; - const textDocument: lc.TextDocumentIdentifier = { - uri: editor.document.uri.toString(), - }; - - const runnables = await client.sendRequest(ra.runnables, { - textDocument, - position: client.code2ProtocolConverter.asPosition(editor.selection.active), - }); - const items: RunnableQuickPick[] = []; - if (prevRunnable) { - items.push(prevRunnable); + // show a placeholder while we get the runnables from the server + const quickPick = vscode.window.createQuickPick(); + quickPick.title = "Select Runnable"; + if (showButtons) { + quickPick.buttons = quickPickButtons; } - for (const r of runnables) { - if (prevRunnable && JSON.stringify(prevRunnable.runnable) === JSON.stringify(r)) { - continue; - } + quickPick.items = [{ label: "Looking for runnables..." }]; + quickPick.activeItems = []; + quickPick.show(); - if (debuggeeOnly && (r.label.startsWith("doctest") || r.label.startsWith("cargo"))) { - continue; - } - items.push(new RunnableQuickPick(r)); - } + const runnables = await getRunnables(ctx.client, editor, prevRunnable, debuggeeOnly); - if (items.length === 0) { + if (runnables.length === 0) { // it is the debug case, run always has at least 'cargo check ...' 
        // see crates\rust-analyzer\src\main_loop\handlers.rs, handle_runnables
        await vscode.window.showErrorMessage("There's no debug target!");
+       quickPick.dispose();
        return;
    }

-    return await new Promise((resolve) => {
-        const disposables: vscode.Disposable[] = [];
-        const close = (result?: RunnableQuickPick) => {
-            resolve(result);
-            disposables.forEach((d) => d.dispose());
-        };
+    // clear the list before we hook up listeners to avoid invoking them
+    // if the user happens to accept the placeholder item
+    quickPick.items = [];

-        const quickPick = vscode.window.createQuickPick();
-        quickPick.items = items;
-        quickPick.title = "Select Runnable";
-        if (showButtons) {
-            quickPick.buttons = quickPickButtons;
-        }
-        disposables.push(
-            quickPick.onDidHide(() => close()),
-            quickPick.onDidAccept(() => close(quickPick.selectedItems[0])),
-            quickPick.onDidTriggerButton(async (_button) => {
-                const runnable = unwrapUndefinable(quickPick.activeItems[0]).runnable;
-                await makeDebugConfig(ctx, runnable);
-                close();
-            }),
-            quickPick.onDidChangeActive((activeList) => {
-                if (showButtons && activeList.length > 0) {
-                    const active = unwrapUndefinable(activeList[0]);
-                    if (active.label.startsWith("cargo")) {
-                        // save button makes no sense for `cargo test` or `cargo check`
-                        quickPick.buttons = [];
-                    } else if (quickPick.buttons.length === 0) {
-                        quickPick.buttons = quickPickButtons;
-                    }
-                }
-            }),
-            quickPick,
-        );
-        quickPick.show();
-    });
+    return await populateAndGetSelection(
+        quickPick as vscode.QuickPick,
+        runnables,
+        ctx,
+        showButtons,
+    );
 }

 export class RunnableQuickPick implements vscode.QuickPickItem {
@@ -187,3 +152,75 @@ export function createArgs(runnable: ra.Runnable): string[] {
     }
     return args;
 }
+
+async function getRunnables(
+    client: LanguageClient,
+    editor: RustEditor,
+    prevRunnable?: RunnableQuickPick,
+    debuggeeOnly = false,
+): Promise {
+    const textDocument: lc.TextDocumentIdentifier = {
+        uri: editor.document.uri.toString(),
+    };
+
+    const runnables = await client.sendRequest(ra.runnables, {
+        textDocument,
+        position: client.code2ProtocolConverter.asPosition(editor.selection.active),
+    });
+    const items: RunnableQuickPick[] = [];
+    if (prevRunnable) {
+        items.push(prevRunnable);
+    }
+    for (const r of runnables) {
+        if (prevRunnable && JSON.stringify(prevRunnable.runnable) === JSON.stringify(r)) {
+            continue;
+        }
+
+        if (debuggeeOnly && (r.label.startsWith("doctest") || r.label.startsWith("cargo"))) {
+            continue;
+        }
+        items.push(new RunnableQuickPick(r));
+    }
+
+    return items;
+}
+
+async function populateAndGetSelection(
+    quickPick: vscode.QuickPick,
+    runnables: RunnableQuickPick[],
+    ctx: CtxInit,
+    showButtons: boolean,
+): Promise {
+    return new Promise((resolve) => {
+        const disposables: vscode.Disposable[] = [];
+        const close = (result?: RunnableQuickPick) => {
+            resolve(result);
+            disposables.forEach((d) => d.dispose());
+        };
+        disposables.push(
+            quickPick.onDidHide(() => close()),
+            quickPick.onDidAccept(() => close(quickPick.selectedItems[0] as RunnableQuickPick)),
+            quickPick.onDidTriggerButton(async (_button) => {
+                const runnable = unwrapUndefinable(
+                    quickPick.activeItems[0] as RunnableQuickPick,
+                ).runnable;
+                await makeDebugConfig(ctx, runnable);
+                close();
+            }),
+            quickPick.onDidChangeActive((activeList) => {
+                if (showButtons && activeList.length > 0) {
+                    const active = unwrapUndefinable(activeList[0]);
+                    if (active.label.startsWith("cargo")) {
+                        // save button makes no sense for `cargo test` or `cargo check`
+                        quickPick.buttons = [];
+                    } else if
(quickPick.buttons.length === 0) { + quickPick.buttons = quickPickButtons; + } + } + }), + quickPick, + ); + // populate the list with the actual runnables + quickPick.items = runnables; + }); +} From 83f91f61b13eeff1dc055290e53392f15b9e8660 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Thu, 29 Jun 2023 11:12:48 +0200 Subject: [PATCH 02/80] Infect mbe crate with generic span type parameter --- crates/mbe/src/benchmark.rs | 45 ++++--- crates/mbe/src/expander.rs | 41 ++++--- crates/mbe/src/expander/matcher.rs | 150 +++++++++++++---------- crates/mbe/src/expander/transcriber.rs | 99 ++++++++-------- crates/mbe/src/lib.rs | 31 ++--- crates/mbe/src/parser.rs | 68 ++++++----- crates/mbe/src/syntax_bridge.rs | 54 ++++----- crates/mbe/src/to_parser_input.rs | 4 +- crates/mbe/src/tt_iter.rs | 35 +++--- crates/tt/src/lib.rs | 158 +++++++++++++------------ 10 files changed, 360 insertions(+), 325 deletions(-) diff --git a/crates/mbe/src/benchmark.rs b/crates/mbe/src/benchmark.rs index 9d43e130457dd..19cb20354b74e 100644 --- a/crates/mbe/src/benchmark.rs +++ b/crates/mbe/src/benchmark.rs @@ -6,10 +6,11 @@ use syntax::{ AstNode, SmolStr, }; use test_utils::{bench, bench_fixture, skip_slow_tests}; +use tt::{Span, TokenId}; use crate::{ parser::{MetaVarKind, Op, RepeatKind, Separator}, - syntax_node_to_token_tree, tt, DeclarativeMacro, + syntax_node_to_token_tree, DeclarativeMacro, }; #[test] @@ -54,7 +55,7 @@ fn macro_rules_fixtures() -> FxHashMap { .collect() } -fn macro_rules_fixtures_tt() -> FxHashMap { +fn macro_rules_fixtures_tt() -> FxHashMap> { let fixture = bench_fixture::numerous_macro_rules(); let source_file = ast::SourceFile::parse(&fixture).ok().unwrap(); @@ -71,7 +72,9 @@ fn macro_rules_fixtures_tt() -> FxHashMap { } /// Generate random invocation fixtures from rules -fn invocation_fixtures(rules: &FxHashMap) -> Vec<(String, tt::Subtree)> { +fn invocation_fixtures( + rules: &FxHashMap, +) -> Vec<(String, tt::Subtree)> { let mut seed = 123456789; let mut res = Vec::new(); @@ -93,8 +96,8 @@ fn invocation_fixtures(rules: &FxHashMap) -> Vec<(Stri loop { let mut subtree = tt::Subtree { delimiter: tt::Delimiter { - open: tt::TokenId::UNSPECIFIED, - close: tt::TokenId::UNSPECIFIED, + open: tt::TokenId::DUMMY, + close: tt::TokenId::DUMMY, kind: tt::DelimiterKind::Invisible, }, token_trees: vec![], @@ -116,7 +119,7 @@ fn invocation_fixtures(rules: &FxHashMap) -> Vec<(Stri } return res; - fn collect_from_op(op: &Op, parent: &mut tt::Subtree, seed: &mut usize) { + fn collect_from_op(op: &Op, parent: &mut tt::Subtree, seed: &mut usize) { return match op { Op::Var { kind, .. 
} => match kind.as_ref() { Some(MetaVarKind::Ident) => parent.token_trees.push(make_ident("foo")), @@ -202,36 +205,30 @@ fn invocation_fixtures(rules: &FxHashMap) -> Vec<(Stri *seed = usize::wrapping_add(usize::wrapping_mul(*seed, a), c); *seed } - fn make_ident(ident: &str) -> tt::TokenTree { - tt::Leaf::Ident(tt::Ident { - span: tt::TokenId::unspecified(), - text: SmolStr::new(ident), - }) - .into() + fn make_ident(ident: &str) -> tt::TokenTree { + tt::Leaf::Ident(tt::Ident { span: tt::TokenId::DUMMY, text: SmolStr::new(ident) }) + .into() } - fn make_punct(char: char) -> tt::TokenTree { + fn make_punct(char: char) -> tt::TokenTree { tt::Leaf::Punct(tt::Punct { - span: tt::TokenId::unspecified(), + span: tt::TokenId::DUMMY, char, spacing: tt::Spacing::Alone, }) .into() } - fn make_literal(lit: &str) -> tt::TokenTree { - tt::Leaf::Literal(tt::Literal { - span: tt::TokenId::unspecified(), - text: SmolStr::new(lit), - }) - .into() + fn make_literal(lit: &str) -> tt::TokenTree { + tt::Leaf::Literal(tt::Literal { span: tt::TokenId::DUMMY, text: SmolStr::new(lit) }) + .into() } fn make_subtree( kind: tt::DelimiterKind, - token_trees: Option>, - ) -> tt::TokenTree { + token_trees: Option>>, + ) -> tt::TokenTree { tt::Subtree { delimiter: tt::Delimiter { - open: tt::TokenId::unspecified(), - close: tt::TokenId::unspecified(), + open: tt::TokenId::DUMMY, + close: tt::TokenId::DUMMY, kind, }, token_trees: token_trees.unwrap_or_default(), diff --git a/crates/mbe/src/expander.rs b/crates/mbe/src/expander.rs index 908048c990424..fac2b33758108 100644 --- a/crates/mbe/src/expander.rs +++ b/crates/mbe/src/expander.rs @@ -7,15 +7,16 @@ mod transcriber; use rustc_hash::FxHashMap; use syntax::SmolStr; +use tt::Span; -use crate::{parser::MetaVarKind, tt, ExpandError, ExpandResult}; +use crate::{parser::MetaVarKind, ExpandError, ExpandResult}; -pub(crate) fn expand_rules( - rules: &[crate::Rule], - input: &tt::Subtree, +pub(crate) fn expand_rules( + rules: &[crate::Rule], + input: &tt::Subtree, is_2021: bool, -) -> ExpandResult { - let mut match_: Option<(matcher::Match, &crate::Rule)> = None; +) -> ExpandResult> { + let mut match_: Option<(matcher::Match, &crate::Rule)> = None; for rule in rules { let new_match = matcher::match_(&rule.lhs, input, is_2021); @@ -47,7 +48,7 @@ pub(crate) fn expand_rules( ExpandResult { value, err: match_.err.or(transcribe_err) } } else { ExpandResult::new( - tt::Subtree { delimiter: tt::Delimiter::unspecified(), token_trees: vec![] }, + tt::Subtree { delimiter: tt::Delimiter::UNSPECIFIED, token_trees: vec![] }, ExpandError::NoMatchingRule, ) } @@ -98,23 +99,29 @@ pub(crate) fn expand_rules( /// In other words, `Bindings` is a *multi* mapping from `SmolStr` to /// `tt::TokenTree`, where the index to select a particular `TokenTree` among /// many is not a plain `usize`, but a `&[usize]`. 
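/// For example, matching `$($x:ident),*` against `a, b` conceptually binds
/// `x` to `Nested([Fragment(a), Fragment(b)])`, and the transcriber selects
/// into it with one index per repetition level. A minimal sketch of that
/// shape, using the `Binding`/`Fragment` variants declared just below
/// (`ident_tt` is a hypothetical helper producing a `tt::TokenTree<S>` leaf,
/// not code from this patch):
/// ```ignore
/// let x: Binding<S> = Binding::Nested(vec![
///     Binding::Fragment(Fragment::Tokens(ident_tt("a"))),
///     Binding::Fragment(Fragment::Tokens(ident_tt("b"))),
/// ]);
/// ```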
-#[derive(Debug, Default, Clone, PartialEq, Eq)] -struct Bindings { - inner: FxHashMap, +#[derive(Debug, Clone, PartialEq, Eq)] +struct Bindings { + inner: FxHashMap>, +} + +impl Default for Bindings { + fn default() -> Self { + Self { inner: Default::default() } + } } #[derive(Debug, Clone, PartialEq, Eq)] -enum Binding { - Fragment(Fragment), - Nested(Vec), +enum Binding { + Fragment(Fragment), + Nested(Vec>), Empty, Missing(MetaVarKind), } #[derive(Debug, Clone, PartialEq, Eq)] -enum Fragment { +enum Fragment { /// token fragments are just copy-pasted into the output - Tokens(tt::TokenTree), + Tokens(tt::TokenTree), /// Expr ast fragments are surrounded with `()` on insertion to preserve /// precedence. Note that this impl is different from the one currently in /// `rustc` -- `rustc` doesn't translate fragments into token trees at all. @@ -122,7 +129,7 @@ enum Fragment { /// At one point in time, we tried to use "fake" delimiters here à la /// proc-macro delimiter=none. As we later discovered, "none" delimiters are /// tricky to handle in the parser, and rustc doesn't handle those either. - Expr(tt::TokenTree), + Expr(tt::TokenTree), /// There are roughly two types of paths: paths in expression context, where a /// separator `::` between an identifier and its following generic argument list /// is mandatory, and paths in type context, where `::` can be omitted. @@ -132,5 +139,5 @@ enum Fragment { /// and is trasncribed as an expression-context path, verbatim transcription /// would cause a syntax error. We need to fix it up just before transcribing; /// see `transcriber::fix_up_and_push_path_tt()`. - Path(tt::TokenTree), + Path(tt::TokenTree), } diff --git a/crates/mbe/src/expander/matcher.rs b/crates/mbe/src/expander/matcher.rs index 1471af98b75b8..796c9f2eb3211 100644 --- a/crates/mbe/src/expander/matcher.rs +++ b/crates/mbe/src/expander/matcher.rs @@ -63,21 +63,20 @@ use std::rc::Rc; use smallvec::{smallvec, SmallVec}; use syntax::SmolStr; +use tt::Span; use crate::{ expander::{Binding, Bindings, ExpandResult, Fragment}, parser::{MetaVarKind, Op, RepeatKind, Separator}, - tt, tt_iter::TtIter, ExpandError, MetaTemplate, ValueResult, }; -impl Bindings { +impl Bindings { fn push_optional(&mut self, name: &SmolStr) { // FIXME: Do we have a better way to represent an empty token ? // Insert an empty subtree for empty token - let tt = - tt::Subtree { delimiter: tt::Delimiter::unspecified(), token_trees: vec![] }.into(); + let tt = tt::Subtree { delimiter: tt::Delimiter::UNSPECIFIED, token_trees: vec![] }.into(); self.inner.insert(name.clone(), Binding::Fragment(Fragment::Tokens(tt))); } @@ -85,14 +84,14 @@ impl Bindings { self.inner.insert(name.clone(), Binding::Empty); } - fn bindings(&self) -> impl Iterator { + fn bindings(&self) -> impl Iterator> { self.inner.values() } } -#[derive(Clone, Debug, Default, PartialEq, Eq)] -pub(super) struct Match { - pub(super) bindings: Bindings, +#[derive(Clone, Debug, PartialEq, Eq)] +pub(super) struct Match { + pub(super) bindings: Bindings, /// We currently just keep the first error and count the rest to compare matches. 
pub(super) err: Option, pub(super) err_count: usize, @@ -102,7 +101,19 @@ pub(super) struct Match { pub(super) bound_count: usize, } -impl Match { +impl Default for Match { + fn default() -> Self { + Self { + bindings: Default::default(), + err: Default::default(), + err_count: Default::default(), + unmatched_tts: Default::default(), + bound_count: Default::default(), + } + } +} + +impl Match { fn add_err(&mut self, err: ExpandError) { let prev_err = self.err.take(); self.err = prev_err.or(Some(err)); @@ -111,12 +122,16 @@ impl Match { } /// Matching errors are added to the `Match`. -pub(super) fn match_(pattern: &MetaTemplate, input: &tt::Subtree, is_2021: bool) -> Match { +pub(super) fn match_( + pattern: &MetaTemplate, + input: &tt::Subtree, + is_2021: bool, +) -> Match { let mut res = match_loop(pattern, input, is_2021); res.bound_count = count(res.bindings.bindings()); return res; - fn count<'a>(bindings: impl Iterator) -> usize { + fn count<'a, S: 'a>(bindings: impl Iterator>) -> usize { bindings .map(|it| match it { Binding::Fragment(_) => 1, @@ -129,10 +144,10 @@ pub(super) fn match_(pattern: &MetaTemplate, input: &tt::Subtree, is_2021: bool) } #[derive(Debug, Clone)] -enum BindingKind { +enum BindingKind { Empty(SmolStr), Optional(SmolStr), - Fragment(SmolStr, Fragment), + Fragment(SmolStr, Fragment), Missing(SmolStr, MetaVarKind), Nested(usize, usize), } @@ -146,13 +161,18 @@ enum LinkNode { Parent { idx: usize, len: usize }, } -#[derive(Default)] -struct BindingsBuilder { - nodes: Vec>>>, +struct BindingsBuilder { + nodes: Vec>>>>, nested: Vec>>, } -impl BindingsBuilder { +impl Default for BindingsBuilder { + fn default() -> Self { + Self { nodes: Default::default(), nested: Default::default() } + } +} + +impl BindingsBuilder { fn alloc(&mut self) -> BindingsIdx { let idx = self.nodes.len(); self.nodes.push(Vec::new()); @@ -189,7 +209,7 @@ impl BindingsBuilder { self.nodes[idx.0].push(LinkNode::Node(Rc::new(BindingKind::Optional(var.clone())))); } - fn push_fragment(&mut self, idx: &mut BindingsIdx, var: &SmolStr, fragment: Fragment) { + fn push_fragment(&mut self, idx: &mut BindingsIdx, var: &SmolStr, fragment: Fragment) { self.nodes[idx.0] .push(LinkNode::Node(Rc::new(BindingKind::Fragment(var.clone(), fragment)))); } @@ -210,11 +230,11 @@ impl BindingsBuilder { idx.0 = new_idx; } - fn build(self, idx: &BindingsIdx) -> Bindings { + fn build(self, idx: &BindingsIdx) -> Bindings { self.build_inner(&self.nodes[idx.0]) } - fn build_inner(&self, link_nodes: &[LinkNode>]) -> Bindings { + fn build_inner(&self, link_nodes: &[LinkNode>>]) -> Bindings { let mut bindings = Bindings::default(); let mut nodes = Vec::new(); self.collect_nodes(link_nodes, &mut nodes); @@ -264,7 +284,7 @@ impl BindingsBuilder { &'a self, id: usize, len: usize, - nested_refs: &mut Vec<&'a [LinkNode>]>, + nested_refs: &mut Vec<&'a [LinkNode>>]>, ) { self.nested[id].iter().take(len).for_each(|it| match it { LinkNode::Node(id) => nested_refs.push(&self.nodes[*id]), @@ -272,7 +292,7 @@ impl BindingsBuilder { }); } - fn collect_nested(&self, idx: usize, nested_idx: usize, nested: &mut Vec) { + fn collect_nested(&self, idx: usize, nested_idx: usize, nested: &mut Vec>) { let last = &self.nodes[idx]; let mut nested_refs: Vec<&[_]> = Vec::new(); self.nested[nested_idx].iter().for_each(|it| match *it { @@ -283,7 +303,7 @@ impl BindingsBuilder { nested.extend(nested_refs.into_iter().map(|iter| self.build_inner(iter))); } - fn collect_nodes_ref<'a>(&'a self, id: usize, len: usize, nodes: &mut Vec<&'a BindingKind>) { + fn 
collect_nodes_ref<'a>(&'a self, id: usize, len: usize, nodes: &mut Vec<&'a BindingKind>) { self.nodes[id].iter().take(len).for_each(|it| match it { LinkNode::Node(it) => nodes.push(it), LinkNode::Parent { idx, len } => self.collect_nodes_ref(*idx, *len, nodes), @@ -292,8 +312,8 @@ impl BindingsBuilder { fn collect_nodes<'a>( &'a self, - link_nodes: &'a [LinkNode>], - nodes: &mut Vec<&'a BindingKind>, + link_nodes: &'a [LinkNode>>], + nodes: &mut Vec<&'a BindingKind>, ) { link_nodes.iter().for_each(|it| match it { LinkNode::Node(it) => nodes.push(it), @@ -303,22 +323,22 @@ impl BindingsBuilder { } #[derive(Debug, Clone)] -struct MatchState<'t> { +struct MatchState<'t, S> { /// The position of the "dot" in this matcher - dot: OpDelimitedIter<'t>, + dot: OpDelimitedIter<'t, S>, /// Token subtree stack /// When matching against matchers with nested delimited submatchers (e.g., `pat ( pat ( .. ) /// pat ) pat`), we need to keep track of the matchers we are descending into. This stack does /// that where the bottom of the stack is the outermost matcher. - stack: SmallVec<[OpDelimitedIter<'t>; 4]>, + stack: SmallVec<[OpDelimitedIter<'t, S>; 4]>, /// The "parent" matcher position if we are in a repetition. That is, the matcher position just /// before we enter the repetition. - up: Option>>, + up: Option>>, /// The separator if we are in a repetition. - sep: Option, + sep: Option>, /// The KleeneOp of this sequence if we are in a repetition. sep_kind: Option, @@ -330,7 +350,7 @@ struct MatchState<'t> { bindings: BindingsIdx, /// Cached result of meta variable parsing - meta_result: Option<(TtIter<'t>, ExpandResult>)>, + meta_result: Option<(TtIter<'t, S>, ExpandResult>>)>, /// Is error occurred in this state, will `poised` to "parent" is_error: bool, @@ -355,16 +375,16 @@ struct MatchState<'t> { /// - `bb_items`: the set of items that are waiting for the black-box parser. /// - `error_items`: the set of items in errors, used for error-resilient parsing #[inline] -fn match_loop_inner<'t>( - src: TtIter<'t>, - stack: &[TtIter<'t>], - res: &mut Match, - bindings_builder: &mut BindingsBuilder, - cur_items: &mut SmallVec<[MatchState<'t>; 1]>, - bb_items: &mut SmallVec<[MatchState<'t>; 1]>, - next_items: &mut Vec>, - eof_items: &mut SmallVec<[MatchState<'t>; 1]>, - error_items: &mut SmallVec<[MatchState<'t>; 1]>, +fn match_loop_inner<'t, S: Span>( + src: TtIter<'t, S>, + stack: &[TtIter<'t, S>], + res: &mut Match, + bindings_builder: &mut BindingsBuilder, + cur_items: &mut SmallVec<[MatchState<'t, S>; 1]>, + bb_items: &mut SmallVec<[MatchState<'t, S>; 1]>, + next_items: &mut Vec>, + eof_items: &mut SmallVec<[MatchState<'t, S>; 1]>, + error_items: &mut SmallVec<[MatchState<'t, S>; 1]>, is_2021: bool, ) { macro_rules! 
try_push { @@ -468,7 +488,7 @@ fn match_loop_inner<'t>( if let Ok(subtree) = src.clone().expect_subtree() { if subtree.delimiter.kind == delimiter.kind { item.stack.push(item.dot); - item.dot = tokens.iter_delimited(Some(delimiter)); + item.dot = tokens.iter_delimited(Some(*delimiter)); cur_items.push(item); } } @@ -587,9 +607,9 @@ fn match_loop_inner<'t>( } } -fn match_loop(pattern: &MetaTemplate, src: &tt::Subtree, is_2021: bool) -> Match { +fn match_loop(pattern: &MetaTemplate, src: &tt::Subtree, is_2021: bool) -> Match { let mut src = TtIter::new(src); - let mut stack: SmallVec<[TtIter<'_>; 1]> = SmallVec::new(); + let mut stack: SmallVec<[TtIter<'_, S>; 1]> = SmallVec::new(); let mut res = Match::default(); let mut error_recover_item = None; @@ -736,11 +756,11 @@ fn match_loop(pattern: &MetaTemplate, src: &tt::Subtree, is_2021: bool) -> Match } } -fn match_meta_var( +fn match_meta_var( kind: MetaVarKind, - input: &mut TtIter<'_>, + input: &mut TtIter<'_, S>, is_2021: bool, -) -> ExpandResult> { +) -> ExpandResult>> { let fragment = match kind { MetaVarKind::Path => { return input @@ -811,7 +831,7 @@ fn match_meta_var( input.expect_fragment(fragment).map(|it| it.map(Fragment::Tokens)) } -fn collect_vars(collector_fun: &mut impl FnMut(SmolStr), pattern: &MetaTemplate) { +fn collect_vars(collector_fun: &mut impl FnMut(SmolStr), pattern: &MetaTemplate) { for op in pattern.iter() { match op { Op::Var { name, .. } => collector_fun(name.clone()), @@ -824,38 +844,38 @@ fn collect_vars(collector_fun: &mut impl FnMut(SmolStr), pattern: &MetaTemplate) } } } -impl MetaTemplate { - fn iter_delimited<'a>(&'a self, delimited: Option<&'a tt::Delimiter>) -> OpDelimitedIter<'a> { +impl MetaTemplate { + fn iter_delimited(&self, delimited: Option>) -> OpDelimitedIter<'_, S> { OpDelimitedIter { inner: &self.0, idx: 0, - delimited: delimited.unwrap_or(&tt::Delimiter::UNSPECIFIED), + delimited: delimited.unwrap_or(tt::Delimiter::UNSPECIFIED), } } } #[derive(Debug, Clone, Copy)] -enum OpDelimited<'a> { - Op(&'a Op), +enum OpDelimited<'a, S> { + Op(&'a Op), Open, Close, } #[derive(Debug, Clone, Copy)] -struct OpDelimitedIter<'a> { - inner: &'a [Op], - delimited: &'a tt::Delimiter, +struct OpDelimitedIter<'a, S> { + inner: &'a [Op], + delimited: tt::Delimiter, idx: usize, } -impl<'a> OpDelimitedIter<'a> { +impl<'a, S: Span> OpDelimitedIter<'a, S> { fn is_eof(&self) -> bool { let len = self.inner.len() + if self.delimited.kind != tt::DelimiterKind::Invisible { 2 } else { 0 }; self.idx >= len } - fn peek(&self) -> Option> { + fn peek(&self) -> Option> { match self.delimited.kind { tt::DelimiterKind::Invisible => self.inner.get(self.idx).map(OpDelimited::Op), _ => match self.idx { @@ -871,8 +891,8 @@ impl<'a> OpDelimitedIter<'a> { } } -impl<'a> Iterator for OpDelimitedIter<'a> { - type Item = OpDelimited<'a>; +impl<'a, S: Span> Iterator for OpDelimitedIter<'a, S> { + type Item = OpDelimited<'a, S>; fn next(&mut self) -> Option { let res = self.peek(); @@ -888,8 +908,8 @@ impl<'a> Iterator for OpDelimitedIter<'a> { } } -impl TtIter<'_> { - fn expect_separator(&mut self, separator: &Separator) -> bool { +impl TtIter<'_, S> { + fn expect_separator(&mut self, separator: &Separator) -> bool { let mut fork = self.clone(); let ok = match separator { Separator::Ident(lhs) => match fork.expect_ident_or_underscore() { @@ -919,7 +939,7 @@ impl TtIter<'_> { ok } - fn expect_tt(&mut self) -> Result { + fn expect_tt(&mut self) -> Result, ()> { if let Some(tt::TokenTree::Leaf(tt::Leaf::Punct(punct))) = self.peek_n(0) { if 
punct.char == '\'' { self.expect_lifetime() @@ -936,7 +956,7 @@ impl TtIter<'_> { } } - fn expect_lifetime(&mut self) -> Result { + fn expect_lifetime(&mut self) -> Result, ()> { let punct = self.expect_single_punct()?; if punct.char != '\'' { return Err(()); @@ -953,7 +973,7 @@ impl TtIter<'_> { .into()) } - fn eat_char(&mut self, c: char) -> Option { + fn eat_char(&mut self, c: char) -> Option> { let mut fork = self.clone(); match fork.expect_char(c) { Ok(_) => { diff --git a/crates/mbe/src/expander/transcriber.rs b/crates/mbe/src/expander/transcriber.rs index cdac2f1e3bb8c..4f5cd0480c78d 100644 --- a/crates/mbe/src/expander/transcriber.rs +++ b/crates/mbe/src/expander/transcriber.rs @@ -2,20 +2,20 @@ //! `$ident => foo`, interpolates variables in the template, to get `fn foo() {}` use syntax::SmolStr; +use tt::{Delimiter, Span}; use crate::{ expander::{Binding, Bindings, Fragment}, parser::{MetaVarKind, Op, RepeatKind, Separator}, - tt::{self, Delimiter}, CountError, ExpandError, ExpandResult, MetaTemplate, }; -impl Bindings { +impl Bindings { fn contains(&self, name: &str) -> bool { self.inner.contains_key(name) } - fn get(&self, name: &str) -> Result<&Binding, ExpandError> { + fn get(&self, name: &str) -> Result<&Binding, ExpandError> { match self.inner.get(name) { Some(binding) => Ok(binding), None => Err(ExpandError::binding_error(format!("could not find binding `{name}`"))), @@ -26,7 +26,7 @@ impl Bindings { &self, name: &str, nesting: &mut [NestingState], - ) -> Result { + ) -> Result, ExpandError> { macro_rules! binding_err { ($($arg:tt)*) => { ExpandError::binding_error(format!($($arg)*)) }; } @@ -54,15 +54,15 @@ impl Bindings { Binding::Missing(it) => Ok(match it { MetaVarKind::Stmt => { Fragment::Tokens(tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct { - span: tt::TokenId::unspecified(), + span: S::DUMMY, char: ';', spacing: tt::Spacing::Alone, }))) } MetaVarKind::Block => Fragment::Tokens(tt::TokenTree::Subtree(tt::Subtree { delimiter: tt::Delimiter { - open: tt::TokenId::unspecified(), - close: tt::TokenId::unspecified(), + open: S::DUMMY, + close: S::DUMMY, kind: tt::DelimiterKind::Brace, }, token_trees: vec![], @@ -82,19 +82,19 @@ impl Bindings { | MetaVarKind::Ident => { Fragment::Tokens(tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident { text: SmolStr::new_inline("missing"), - span: tt::TokenId::unspecified(), + span: S::DUMMY, }))) } MetaVarKind::Lifetime => { Fragment::Tokens(tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident { text: SmolStr::new_inline("'missing"), - span: tt::TokenId::unspecified(), + span: S::DUMMY, }))) } MetaVarKind::Literal => { Fragment::Tokens(tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident { text: SmolStr::new_inline("\"missing\""), - span: tt::TokenId::unspecified(), + span: S::DUMMY, }))) } }), @@ -108,12 +108,12 @@ impl Bindings { } } -pub(super) fn transcribe( - template: &MetaTemplate, - bindings: &Bindings, -) -> ExpandResult { +pub(super) fn transcribe( + template: &MetaTemplate, + bindings: &Bindings, +) -> ExpandResult> { let mut ctx = ExpandCtx { bindings, nesting: Vec::new() }; - let mut arena: Vec = Vec::new(); + let mut arena: Vec> = Vec::new(); expand_subtree(&mut ctx, template, None, &mut arena) } @@ -129,17 +129,17 @@ struct NestingState { } #[derive(Debug)] -struct ExpandCtx<'a> { - bindings: &'a Bindings, +struct ExpandCtx<'a, S> { + bindings: &'a Bindings, nesting: Vec, } -fn expand_subtree( - ctx: &mut ExpandCtx<'_>, - template: &MetaTemplate, - delimiter: Option, - arena: &mut Vec, -) -> ExpandResult { +fn expand_subtree( + ctx: &mut 
ExpandCtx<'_, S>, + template: &MetaTemplate, + delimiter: Option>, + arena: &mut Vec>, +) -> ExpandResult> { // remember how many elements are in the arena now - when returning, we want to drain exactly how many elements we added. This way, the recursive uses of the arena get their own "view" of the arena, but will reuse the allocation let start_elements = arena.len(); let mut err = None; @@ -180,7 +180,7 @@ fn expand_subtree( arena.push( tt::Leaf::Literal(tt::Literal { text: index.to_string().into(), - span: tt::TokenId::unspecified(), + span: S::DUMMY, }) .into(), ); @@ -237,11 +237,8 @@ fn expand_subtree( } }; arena.push( - tt::Leaf::Literal(tt::Literal { - text: c.to_string().into(), - span: tt::TokenId::unspecified(), - }) - .into(), + tt::Leaf::Literal(tt::Literal { text: c.to_string().into(), span: S::DUMMY }) + .into(), ); } } @@ -257,7 +254,11 @@ fn expand_subtree( } } -fn expand_var(ctx: &mut ExpandCtx<'_>, v: &SmolStr, id: tt::TokenId) -> ExpandResult { +fn expand_var( + ctx: &mut ExpandCtx<'_, S>, + v: &SmolStr, + id: S, +) -> ExpandResult> { // We already handle $crate case in mbe parser debug_assert!(v != "crate"); @@ -296,14 +297,14 @@ fn expand_var(ctx: &mut ExpandCtx<'_>, v: &SmolStr, id: tt::TokenId) -> ExpandRe } } -fn expand_repeat( - ctx: &mut ExpandCtx<'_>, - template: &MetaTemplate, +fn expand_repeat( + ctx: &mut ExpandCtx<'_, S>, + template: &MetaTemplate, kind: RepeatKind, - separator: &Option, - arena: &mut Vec, -) -> ExpandResult { - let mut buf: Vec = Vec::new(); + separator: &Option>, + arena: &mut Vec>, +) -> ExpandResult> { + let mut buf: Vec> = Vec::new(); ctx.nesting.push(NestingState { idx: 0, at_end: false, hit: false }); // Dirty hack to make macro-expansion terminate. // This should be replaced by a proper macro-by-example implementation @@ -342,7 +343,7 @@ fn expand_repeat( continue; } - t.delimiter = tt::Delimiter::unspecified(); + t.delimiter = tt::Delimiter::UNSPECIFIED; push_subtree(&mut buf, t); if let Some(sep) = separator { @@ -376,7 +377,7 @@ fn expand_repeat( // Check if it is a single token subtree without any delimiter // e.g {Delimiter:None> ['>'] /Delimiter:None>} - let tt = tt::Subtree { delimiter: tt::Delimiter::unspecified(), token_trees: buf }.into(); + let tt = tt::Subtree { delimiter: tt::Delimiter::UNSPECIFIED, token_trees: buf }.into(); if RepeatKind::OneOrMore == kind && counter == 0 { return ExpandResult { @@ -387,14 +388,14 @@ fn expand_repeat( ExpandResult { value: Fragment::Tokens(tt), err } } -fn push_fragment(buf: &mut Vec, fragment: Fragment) { +fn push_fragment(buf: &mut Vec>, fragment: Fragment) { match fragment { Fragment::Tokens(tt::TokenTree::Subtree(tt)) => push_subtree(buf, tt), Fragment::Expr(tt::TokenTree::Subtree(mut tt)) => { if tt.delimiter.kind == tt::DelimiterKind::Invisible { tt.delimiter = tt::Delimiter { - open: tt::TokenId::UNSPECIFIED, - close: tt::TokenId::UNSPECIFIED, + open: S::DUMMY, + close: S::DUMMY, kind: tt::DelimiterKind::Parenthesis, }; } @@ -405,7 +406,7 @@ fn push_fragment(buf: &mut Vec, fragment: Fragment) { } } -fn push_subtree(buf: &mut Vec, tt: tt::Subtree) { +fn push_subtree(buf: &mut Vec>, tt: tt::Subtree) { match tt.delimiter.kind { tt::DelimiterKind::Invisible => buf.extend(tt.token_trees), _ => buf.push(tt.into()), @@ -415,7 +416,7 @@ fn push_subtree(buf: &mut Vec, tt: tt::Subtree) { /// Inserts the path separator `::` between an identifier and its following generic /// argument list, and then pushes into the buffer. See [`Fragment::Path`] for why /// we need this fixup. 
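/// For instance, a `$p:path` fragment that captured `Vec<i32>` in type context
/// must come out in turbofish form, `Vec::<i32>`, when pasted into expression
/// position. A minimal sketch of the observable effect, assuming a `subtree`
/// with an invisible delimiter whose top-level tokens are `Vec` `<` `i32` `>`
/// (illustration only, not code from this patch):
/// ```ignore
/// let mut buf = Vec::new();
/// fix_up_and_push_path_tt(&mut buf, subtree);
/// // buf now carries `Vec` `::` `<` `i32` `>`: a joint `::` was inserted
/// // between the identifier and its generic argument list.
/// ```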
-fn fix_up_and_push_path_tt(buf: &mut Vec, subtree: tt::Subtree) { +fn fix_up_and_push_path_tt(buf: &mut Vec>, subtree: tt::Subtree) { stdx::always!(matches!(subtree.delimiter.kind, tt::DelimiterKind::Invisible)); let mut prev_was_ident = false; // Note that we only need to fix up the top-level `TokenTree`s because the @@ -432,7 +433,7 @@ fn fix_up_and_push_path_tt(buf: &mut Vec, subtree: tt::Subtree) { tt::Leaf::Punct(tt::Punct { char: ':', spacing: tt::Spacing::Joint, - span: tt::Span::unspecified(), + span: S::DUMMY, }) .into(), ); @@ -440,7 +441,7 @@ fn fix_up_and_push_path_tt(buf: &mut Vec, subtree: tt::Subtree) { tt::Leaf::Punct(tt::Punct { char: ':', spacing: tt::Spacing::Alone, - span: tt::Span::unspecified(), + span: S::DUMMY, }) .into(), ); @@ -453,9 +454,9 @@ fn fix_up_and_push_path_tt(buf: &mut Vec, subtree: tt::Subtree) { /// Handles `${count(t, depth)}`. `our_depth` is the recursion depth and `count_depth` is the depth /// defined by the metavar expression. -fn count( - ctx: &ExpandCtx<'_>, - binding: &Binding, +fn count( + ctx: &ExpandCtx<'_, S>, + binding: &Binding, our_depth: usize, count_depth: Option, ) -> Result { diff --git a/crates/mbe/src/lib.rs b/crates/mbe/src/lib.rs index a439c9c50d6c6..46599802935e1 100644 --- a/crates/mbe/src/lib.rs +++ b/crates/mbe/src/lib.rs @@ -18,8 +18,8 @@ mod to_parser_input; mod benchmark; mod token_map; -use ::tt::token_id as tt; use stdx::impl_from; +use tt::{Span, TokenId}; use std::fmt; @@ -28,8 +28,9 @@ use crate::{ tt_iter::TtIter, }; -pub use self::tt::{Delimiter, DelimiterKind, Punct}; +// FIXME: we probably should re-think `token_tree_to_syntax_node` interfaces pub use ::parser::TopEntryPoint; +pub use tt::{Delimiter, DelimiterKind, Punct}; pub use crate::{ syntax_bridge::{ @@ -125,7 +126,7 @@ impl fmt::Display for CountError { /// and `$()*` have special meaning (see `Var` and `Repeat` data structures) #[derive(Clone, Debug, PartialEq, Eq)] pub struct DeclarativeMacro { - rules: Box<[Rule]>, + rules: Box<[Rule]>, /// Highest id of the token we have in TokenMap shift: Shift, // This is used for correctly determining the behavior of the pat fragment @@ -135,23 +136,23 @@ pub struct DeclarativeMacro { } #[derive(Clone, Debug, PartialEq, Eq)] -struct Rule { - lhs: MetaTemplate, - rhs: MetaTemplate, +struct Rule { + lhs: MetaTemplate, + rhs: MetaTemplate, } #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] pub struct Shift(u32); impl Shift { - pub fn new(tt: &tt::Subtree) -> Shift { + pub fn new(tt: &tt::Subtree) -> Shift { // Note that TokenId is started from zero, // We have to add 1 to prevent duplication. let value = max_id(tt).map_or(0, |it| it + 1); return Shift(value); // Find the max token id inside a subtree - fn max_id(subtree: &tt::Subtree) -> Option { + fn max_id(subtree: &tt::Subtree) -> Option { let filter = |tt: &_| match tt { tt::TokenTree::Subtree(subtree) => { @@ -177,7 +178,7 @@ impl Shift { } /// Shift given TokenTree token id - pub fn shift_all(self, tt: &mut tt::Subtree) { + pub fn shift_all(self, tt: &mut tt::Subtree) { for t in &mut tt.token_trees { match t { tt::TokenTree::Leaf( @@ -224,7 +225,7 @@ impl DeclarativeMacro { } /// The old, `macro_rules! m {}` flavor. - pub fn parse_macro_rules(tt: &tt::Subtree, is_2021: bool) -> DeclarativeMacro { + pub fn parse_macro_rules(tt: &tt::Subtree, is_2021: bool) -> DeclarativeMacro { // Note: this parsing can be implemented using mbe machinery itself, by // matching against `$($lhs:tt => $rhs:tt);*` pattern, but implementing // manually seems easier. 
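A note on the new `S: Span` bound that this patch threads through `DeclarativeMacro`,
`Rule`, and `MetaTemplate`: the only contract a span type must satisfy is the small
`tt::Span` trait added in the `crates/tt` diff below (a `Copy + Debug` type with a
`DUMMY` constant). A minimal sketch of a custom span meeting that contract, under
those assumptions (`FileSpan` is illustrative, not part of the patch):

    #[derive(Debug, Clone, Copy, PartialEq, Eq)]
    struct FileSpan { file: u32, offset: u32 }

    impl tt::Span for FileSpan {
        const DUMMY: Self = FileSpan { file: !0, offset: !0 };
        fn is_dummy(&self) -> bool { *self == Self::DUMMY }
    }

    // With this impl, tt::Subtree<FileSpan>, Rule<FileSpan>, etc. all type-check,
    // while TokenId keeps working unchanged via the provided `impl Span for TokenId`.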
@@ -260,7 +261,7 @@ impl DeclarativeMacro { } /// The new, unstable `macro m {}` flavor. - pub fn parse_macro2(tt: &tt::Subtree, is_2021: bool) -> DeclarativeMacro { + pub fn parse_macro2(tt: &tt::Subtree, is_2021: bool) -> DeclarativeMacro { let mut src = TtIter::new(tt); let mut rules = Vec::new(); let mut err = None; @@ -310,7 +311,7 @@ impl DeclarativeMacro { DeclarativeMacro { rules: rules.into_boxed_slice(), shift: Shift::new(tt), is_2021, err } } - pub fn expand(&self, mut tt: tt::Subtree) -> ExpandResult { + pub fn expand(&self, mut tt: tt::Subtree) -> ExpandResult> { self.shift.shift_all(&mut tt); expander::expand_rules(&self.rules, &tt, self.is_2021) } @@ -335,8 +336,8 @@ impl DeclarativeMacro { } } -impl Rule { - fn parse(src: &mut TtIter<'_>, expect_arrow: bool) -> Result { +impl Rule { + fn parse(src: &mut TtIter<'_, S>, expect_arrow: bool) -> Result { let lhs = src.expect_subtree().map_err(|()| ParseError::expected("expected subtree"))?; if expect_arrow { src.expect_char('=').map_err(|()| ParseError::expected("expected `=`"))?; @@ -351,7 +352,7 @@ impl Rule { } } -fn validate(pattern: &MetaTemplate) -> Result<(), ParseError> { +fn validate(pattern: &MetaTemplate) -> Result<(), ParseError> { for op in pattern.iter() { match op { Op::Subtree { tokens, .. } => validate(tokens)?, diff --git a/crates/mbe/src/parser.rs b/crates/mbe/src/parser.rs index 7a143e7466a93..00ba35377a427 100644 --- a/crates/mbe/src/parser.rs +++ b/crates/mbe/src/parser.rs @@ -3,8 +3,9 @@ use smallvec::{smallvec, SmallVec}; use syntax::SmolStr; +use tt::Span; -use crate::{tt, tt_iter::TtIter, ParseError}; +use crate::{tt_iter::TtIter, ParseError}; /// Consider /// @@ -20,22 +21,22 @@ use crate::{tt, tt_iter::TtIter, ParseError}; /// Stuff to the right is a [`MetaTemplate`] template which is used to produce /// output. 
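/// For example, the pattern side of `($x:ident + $e:expr) => ...` parses into
/// roughly this sequence of the `Op` variants declared below (a paraphrase for
/// illustration; `Punct` and remaining fields are elided):
/// ```ignore
/// [
///     Op::Var { name: "x", kind: Some(MetaVarKind::Ident), .. },
///     Op::Punct(/* '+' */ ..),
///     Op::Var { name: "e", kind: Some(MetaVarKind::Expr), .. },
/// ]
/// ```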
#[derive(Clone, Debug, PartialEq, Eq)] -pub(crate) struct MetaTemplate(pub(crate) Box<[Op]>); +pub(crate) struct MetaTemplate(pub(crate) Box<[Op]>); -impl MetaTemplate { - pub(crate) fn parse_pattern(pattern: &tt::Subtree) -> Result { +impl MetaTemplate { + pub(crate) fn parse_pattern(pattern: &tt::Subtree) -> Result { MetaTemplate::parse(pattern, Mode::Pattern) } - pub(crate) fn parse_template(template: &tt::Subtree) -> Result { + pub(crate) fn parse_template(template: &tt::Subtree) -> Result { MetaTemplate::parse(template, Mode::Template) } - pub(crate) fn iter(&self) -> impl Iterator { + pub(crate) fn iter(&self) -> impl Iterator> { self.0.iter() } - fn parse(tt: &tt::Subtree, mode: Mode) -> Result { + fn parse(tt: &tt::Subtree, mode: Mode) -> Result { let mut src = TtIter::new(tt); let mut res = Vec::new(); @@ -49,16 +50,16 @@ impl MetaTemplate { } #[derive(Clone, Debug, PartialEq, Eq)] -pub(crate) enum Op { - Var { name: SmolStr, kind: Option, id: tt::TokenId }, - Ignore { name: SmolStr, id: tt::TokenId }, +pub(crate) enum Op { + Var { name: SmolStr, kind: Option, id: S }, + Ignore { name: SmolStr, id: S }, Index { depth: usize }, Count { name: SmolStr, depth: Option }, - Repeat { tokens: MetaTemplate, kind: RepeatKind, separator: Option }, - Subtree { tokens: MetaTemplate, delimiter: tt::Delimiter }, - Literal(tt::Literal), - Punct(SmallVec<[tt::Punct; 3]>), - Ident(tt::Ident), + Repeat { tokens: MetaTemplate, kind: RepeatKind, separator: Option> }, + Subtree { tokens: MetaTemplate, delimiter: tt::Delimiter }, + Literal(tt::Literal), + Punct(SmallVec<[tt::Punct; 3]>), + Ident(tt::Ident), } #[derive(Copy, Clone, Debug, PartialEq, Eq)] @@ -87,15 +88,15 @@ pub(crate) enum MetaVarKind { } #[derive(Clone, Debug, Eq)] -pub(crate) enum Separator { - Literal(tt::Literal), - Ident(tt::Ident), - Puncts(SmallVec<[tt::Punct; 3]>), +pub(crate) enum Separator { + Literal(tt::Literal), + Ident(tt::Ident), + Puncts(SmallVec<[tt::Punct; 3]>), } // Note that when we compare a Separator, we just care about its textual value. -impl PartialEq for Separator { - fn eq(&self, other: &Separator) -> bool { +impl PartialEq for Separator { + fn eq(&self, other: &Separator) -> bool { use Separator::*; match (self, other) { @@ -117,11 +118,11 @@ enum Mode { Template, } -fn next_op( - first_peeked: &tt::TokenTree, - src: &mut TtIter<'_>, +fn next_op( + first_peeked: &tt::TokenTree, + src: &mut TtIter<'_, S>, mode: Mode, -) -> Result { +) -> Result, ParseError> { let res = match first_peeked { tt::TokenTree::Leaf(tt::Leaf::Punct(p @ tt::Punct { char: '$', .. 
})) => { src.next().expect("first token already peeked"); @@ -212,7 +213,10 @@ fn next_op( Ok(res) } -fn eat_fragment_kind(src: &mut TtIter<'_>, mode: Mode) -> Result, ParseError> { +fn eat_fragment_kind( + src: &mut TtIter<'_, S>, + mode: Mode, +) -> Result, ParseError> { if let Mode::Pattern = mode { src.expect_char(':').map_err(|()| ParseError::unexpected("missing fragment specifier"))?; let ident = src @@ -240,11 +244,13 @@ fn eat_fragment_kind(src: &mut TtIter<'_>, mode: Mode) -> Result bool { +fn is_boolean_literal(lit: &tt::Literal) -> bool { matches!(lit.text.as_str(), "true" | "false") } -fn parse_repeat(src: &mut TtIter<'_>) -> Result<(Option, RepeatKind), ParseError> { +fn parse_repeat( + src: &mut TtIter<'_, S>, +) -> Result<(Option>, RepeatKind), ParseError> { let mut separator = Separator::Puncts(SmallVec::new()); for tt in src { let tt = match tt { @@ -281,7 +287,7 @@ fn parse_repeat(src: &mut TtIter<'_>) -> Result<(Option, RepeatKind), Err(ParseError::InvalidRepeat) } -fn parse_metavar_expr(src: &mut TtIter<'_>) -> Result { +fn parse_metavar_expr(src: &mut TtIter<'_, S>) -> Result, ()> { let func = src.expect_ident()?; let args = src.expect_subtree()?; @@ -314,7 +320,7 @@ fn parse_metavar_expr(src: &mut TtIter<'_>) -> Result { Ok(op) } -fn parse_depth(src: &mut TtIter<'_>) -> Result { +fn parse_depth(src: &mut TtIter<'_, S>) -> Result { if src.len() == 0 { Ok(0) } else if let tt::Leaf::Literal(lit) = src.expect_literal()? { @@ -325,7 +331,7 @@ fn parse_depth(src: &mut TtIter<'_>) -> Result { } } -fn try_eat_comma(src: &mut TtIter<'_>) -> bool { +fn try_eat_comma(src: &mut TtIter<'_, S>) -> bool { if let Some(tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct { char: ',', .. }))) = src.peek_n(0) { let _ = src.next(); return true; diff --git a/crates/mbe/src/syntax_bridge.rs b/crates/mbe/src/syntax_bridge.rs index 7b9bb61e696ad..01aab6b659cb9 100644 --- a/crates/mbe/src/syntax_bridge.rs +++ b/crates/mbe/src/syntax_bridge.rs @@ -8,23 +8,19 @@ use syntax::{ SyntaxKind::*, SyntaxNode, SyntaxToken, SyntaxTreeBuilder, TextRange, TextSize, WalkEvent, T, }; - -use crate::{ - to_parser_input::to_parser_input, - tt::{ - self, - buffer::{Cursor, TokenBuffer}, - }, - tt_iter::TtIter, - TokenMap, +use tt::{ + buffer::{Cursor, TokenBuffer}, + TokenId, }; +use crate::{to_parser_input::to_parser_input, tt_iter::TtIter, TokenMap}; + #[cfg(test)] mod tests; /// Convert the syntax node to a `TokenTree` (what macro /// will consume). 
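/// A minimal usage sketch, assuming the `TokenId`-spanned signature this patch
/// settles on (illustration only):
/// ```ignore
/// let parse = syntax::SourceFile::parse("fn main() {}");
/// let (subtree, _token_map) = syntax_node_to_token_tree(&parse.syntax_node());
/// assert!(subtree.count() > 0); // `count` comes from `tt::Subtree`
/// ```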
-pub fn syntax_node_to_token_tree(node: &SyntaxNode) -> (tt::Subtree, TokenMap) { +pub fn syntax_node_to_token_tree(node: &SyntaxNode) -> (tt::Subtree, TokenMap) { let (subtree, token_map, _) = syntax_node_to_token_tree_with_modifications( node, Default::default(), @@ -43,7 +39,7 @@ pub fn syntax_node_to_token_tree_with_modifications( next_id: u32, replace: FxHashMap>, append: FxHashMap>, -) -> (tt::Subtree, TokenMap, u32) { +) -> (tt::Subtree, TokenMap, u32) { let global_offset = node.text_range().start(); let mut c = Converter::new(node, global_offset, existing_token_map, next_id, replace, append); let subtree = convert_tokens(&mut c); @@ -108,7 +104,7 @@ pub struct SyntheticToken { // * ForeignItems(SmallVec<[ast::ForeignItem; 1]> pub fn token_tree_to_syntax_node( - tt: &tt::Subtree, + tt: &tt::Subtree, entry_point: parser::TopEntryPoint, ) -> (Parse, TokenMap) { let buffer = match tt { @@ -138,7 +134,7 @@ pub fn token_tree_to_syntax_node( } /// Convert a string to a `TokenTree` -pub fn parse_to_token_tree(text: &str) -> Option<(tt::Subtree, TokenMap)> { +pub fn parse_to_token_tree(text: &str) -> Option<(tt::Subtree, TokenMap)> { let lexed = parser::LexedStr::new(text); if lexed.errors().next().is_some() { return None; @@ -159,7 +155,7 @@ pub fn parse_to_token_tree(text: &str) -> Option<(tt::Subtree, TokenMap)> { } /// Split token tree with separate expr: $($e:expr)SEP* -pub fn parse_exprs_with_sep(tt: &tt::Subtree, sep: char) -> Vec { +pub fn parse_exprs_with_sep(tt: &tt::Subtree, sep: char) -> Vec> { if tt.token_trees.is_empty() { return Vec::new(); } @@ -195,9 +191,9 @@ pub fn parse_exprs_with_sep(tt: &tt::Subtree, sep: char) -> Vec { res } -fn convert_tokens(conv: &mut C) -> tt::Subtree { +fn convert_tokens(conv: &mut C) -> tt::Subtree { struct StackEntry { - subtree: tt::Subtree, + subtree: tt::Subtree, idx: usize, open_range: TextRange, } @@ -296,7 +292,7 @@ fn convert_tokens(conv: &mut C) -> tt::Subtree { .into() }; } - let leaf: tt::Leaf = match kind { + let leaf: tt::Leaf = match kind { T![true] | T![false] => make_leaf!(Ident), IDENT => make_leaf!(Ident), UNDERSCORE => make_leaf!(Ident), @@ -335,7 +331,7 @@ fn convert_tokens(conv: &mut C) -> tt::Subtree { let parent = stack.last_mut(); conv.id_alloc().close_delim(entry.idx, None); - let leaf: tt::Leaf = tt::Punct { + let leaf: tt::Leaf = tt::Punct { span: conv.id_alloc().alloc(entry.open_range, None), char: match entry.subtree.delimiter.kind { tt::DelimiterKind::Parenthesis => '(', @@ -514,7 +510,7 @@ fn doc_comment_text(comment: &ast::Comment) -> SmolStr { fn convert_doc_comment( token: &syntax::SyntaxToken, span: tt::TokenId, -) -> Option> { +) -> Option>> { cov_mark::hit!(test_meta_doc_comments); let comment = ast::Comment::cast(token.clone())?; let doc = comment.kind().doc?; @@ -537,11 +533,11 @@ fn convert_doc_comment( return Some(token_trees); // Helper functions - fn mk_ident(s: &str, span: tt::TokenId) -> tt::TokenTree { + fn mk_ident(s: &str, span: tt::TokenId) -> tt::TokenTree { tt::TokenTree::from(tt::Leaf::from(tt::Ident { text: s.into(), span })) } - fn mk_punct(c: char, span: tt::TokenId) -> tt::TokenTree { + fn mk_punct(c: char, span: tt::TokenId) -> tt::TokenTree { tt::TokenTree::from(tt::Leaf::from(tt::Punct { char: c, spacing: tt::Spacing::Alone, @@ -549,7 +545,7 @@ fn convert_doc_comment( })) } - fn mk_doc_literal(comment: &ast::Comment, span: tt::TokenId) -> tt::TokenTree { + fn mk_doc_literal(comment: &ast::Comment, span: tt::TokenId) -> tt::TokenTree { let lit = tt::Literal { text: 
doc_comment_text(comment), span }; tt::TokenTree::from(tt::Leaf::from(lit)) @@ -636,7 +632,7 @@ trait TokenConverter: Sized { &self, token: &Self::Token, span: tt::TokenId, - ) -> Option>; + ) -> Option>>; fn bump(&mut self) -> Option<(Self::Token, TextRange)>; @@ -666,7 +662,11 @@ impl SrcToken> for usize { impl TokenConverter for RawConverter<'_> { type Token = usize; - fn convert_doc_comment(&self, &token: &usize, span: tt::TokenId) -> Option> { + fn convert_doc_comment( + &self, + &token: &usize, + span: tt::TokenId, + ) -> Option>> { let text = self.lexed.text(token); convert_doc_comment(&doc_comment(text), span) } @@ -819,7 +819,7 @@ impl TokenConverter for Converter { &self, token: &Self::Token, span: tt::TokenId, - ) -> Option> { + ) -> Option>> { convert_doc_comment(token.token()?, span) } @@ -899,7 +899,7 @@ impl TokenConverter for Converter { struct TtTreeSink<'a> { buf: String, - cursor: Cursor<'a>, + cursor: Cursor<'a, TokenId>, open_delims: FxHashMap, text_pos: TextSize, inner: SyntaxTreeBuilder, @@ -907,7 +907,7 @@ struct TtTreeSink<'a> { } impl<'a> TtTreeSink<'a> { - fn new(cursor: Cursor<'a>) -> Self { + fn new(cursor: Cursor<'a, TokenId>) -> Self { TtTreeSink { buf: String::new(), cursor, diff --git a/crates/mbe/src/to_parser_input.rs b/crates/mbe/src/to_parser_input.rs index 051e20b3a3f9c..00a14f04686e2 100644 --- a/crates/mbe/src/to_parser_input.rs +++ b/crates/mbe/src/to_parser_input.rs @@ -3,9 +3,9 @@ use syntax::{SyntaxKind, SyntaxKind::*, T}; -use crate::tt::buffer::TokenBuffer; +use tt::{buffer::TokenBuffer, Span}; -pub(crate) fn to_parser_input(buffer: &TokenBuffer<'_>) -> parser::Input { +pub(crate) fn to_parser_input(buffer: &TokenBuffer<'_, S>) -> parser::Input { let mut res = parser::Input::default(); let mut current = buffer.begin(); diff --git a/crates/mbe/src/tt_iter.rs b/crates/mbe/src/tt_iter.rs index 79ff8ca28e863..44fbbcfc2068b 100644 --- a/crates/mbe/src/tt_iter.rs +++ b/crates/mbe/src/tt_iter.rs @@ -3,16 +3,17 @@ use smallvec::{smallvec, SmallVec}; use syntax::SyntaxKind; +use tt::Span; -use crate::{to_parser_input::to_parser_input, tt, ExpandError, ExpandResult}; +use crate::{to_parser_input::to_parser_input, ExpandError, ExpandResult}; #[derive(Debug, Clone)] -pub(crate) struct TtIter<'a> { - pub(crate) inner: std::slice::Iter<'a, tt::TokenTree>, +pub(crate) struct TtIter<'a, S> { + pub(crate) inner: std::slice::Iter<'a, tt::TokenTree>, } -impl<'a> TtIter<'a> { - pub(crate) fn new(subtree: &'a tt::Subtree) -> TtIter<'a> { +impl<'a, S: Span> TtIter<'a, S> { + pub(crate) fn new(subtree: &'a tt::Subtree) -> TtIter<'a, S> { TtIter { inner: subtree.token_trees.iter() } } @@ -36,35 +37,35 @@ impl<'a> TtIter<'a> { } } - pub(crate) fn expect_subtree(&mut self) -> Result<&'a tt::Subtree, ()> { + pub(crate) fn expect_subtree(&mut self) -> Result<&'a tt::Subtree, ()> { match self.next() { Some(tt::TokenTree::Subtree(it)) => Ok(it), _ => Err(()), } } - pub(crate) fn expect_leaf(&mut self) -> Result<&'a tt::Leaf, ()> { + pub(crate) fn expect_leaf(&mut self) -> Result<&'a tt::Leaf, ()> { match self.next() { Some(tt::TokenTree::Leaf(it)) => Ok(it), _ => Err(()), } } - pub(crate) fn expect_ident(&mut self) -> Result<&'a tt::Ident, ()> { + pub(crate) fn expect_ident(&mut self) -> Result<&'a tt::Ident, ()> { match self.expect_leaf()? 
{ tt::Leaf::Ident(it) if it.text != "_" => Ok(it), _ => Err(()), } } - pub(crate) fn expect_ident_or_underscore(&mut self) -> Result<&'a tt::Ident, ()> { + pub(crate) fn expect_ident_or_underscore(&mut self) -> Result<&'a tt::Ident, ()> { match self.expect_leaf()? { tt::Leaf::Ident(it) => Ok(it), _ => Err(()), } } - pub(crate) fn expect_literal(&mut self) -> Result<&'a tt::Leaf, ()> { + pub(crate) fn expect_literal(&mut self) -> Result<&'a tt::Leaf, ()> { let it = self.expect_leaf()?; match it { tt::Leaf::Literal(_) => Ok(it), @@ -73,7 +74,7 @@ impl<'a> TtIter<'a> { } } - pub(crate) fn expect_single_punct(&mut self) -> Result<&'a tt::Punct, ()> { + pub(crate) fn expect_single_punct(&mut self) -> Result<&'a tt::Punct, ()> { match self.expect_leaf()? { tt::Leaf::Punct(it) => Ok(it), _ => Err(()), @@ -84,7 +85,7 @@ impl<'a> TtIter<'a> { /// /// This method currently may return a single quotation, which is part of lifetime ident and /// conceptually not a punct in the context of mbe. Callers should handle this. - pub(crate) fn expect_glued_punct(&mut self) -> Result, ()> { + pub(crate) fn expect_glued_punct(&mut self) -> Result; 3]>, ()> { let tt::TokenTree::Leaf(tt::Leaf::Punct(first)) = self.next().ok_or(())?.clone() else { return Err(()); }; @@ -126,7 +127,7 @@ impl<'a> TtIter<'a> { pub(crate) fn expect_fragment( &mut self, entry_point: parser::PrefixEntryPoint, - ) -> ExpandResult> { + ) -> ExpandResult>> { let buffer = tt::buffer::TokenBuffer::from_tokens(self.inner.as_slice()); let parser_input = to_parser_input(&buffer); let tree_traversal = entry_point.parse(&parser_input); @@ -181,13 +182,13 @@ impl<'a> TtIter<'a> { ExpandResult { value: res, err } } - pub(crate) fn peek_n(&self, n: usize) -> Option<&'a tt::TokenTree> { + pub(crate) fn peek_n(&self, n: usize) -> Option<&'a tt::TokenTree> { self.inner.as_slice().get(n) } } -impl<'a> Iterator for TtIter<'a> { - type Item = &'a tt::TokenTree; +impl<'a, S> Iterator for TtIter<'a, S> { + type Item = &'a tt::TokenTree; fn next(&mut self) -> Option { self.inner.next() } @@ -197,4 +198,4 @@ impl<'a> Iterator for TtIter<'a> { } } -impl std::iter::ExactSizeIterator for TtIter<'_> {} +impl std::iter::ExactSizeIterator for TtIter<'_, S> {} diff --git a/crates/tt/src/lib.rs b/crates/tt/src/lib.rs index b5a72bec079a4..a4ffc328f2171 100644 --- a/crates/tt/src/lib.rs +++ b/crates/tt/src/lib.rs @@ -47,70 +47,44 @@ pub mod token_id { pub type Cursor<'a> = crate::buffer::Cursor<'a, super::Span>; pub type TokenTreeRef<'a> = crate::buffer::TokenTreeRef<'a, super::Span>; } +} - impl Delimiter { - pub const UNSPECIFIED: Self = Self { - open: TokenId::UNSPECIFIED, - close: TokenId::UNSPECIFIED, - kind: DelimiterKind::Invisible, - }; - pub const fn unspecified() -> Self { - Self::UNSPECIFIED - } - } - impl Subtree { - pub const fn empty() -> Self { - Subtree { delimiter: Delimiter::unspecified(), token_trees: vec![] } - } - } - impl TokenTree { - pub const fn empty() -> Self { - Self::Subtree(Subtree::empty()) - } - } +pub trait Span: std::fmt::Debug + Copy + Sized { + const DUMMY: Self; + fn is_dummy(&self) -> bool; +} +impl Span for TokenId { + const DUMMY: Self = TokenId(!0); - impl Subtree { - pub fn visit_ids(&mut self, f: &mut impl FnMut(TokenId) -> TokenId) { - self.delimiter.open = f(self.delimiter.open); - self.delimiter.close = f(self.delimiter.close); - self.token_trees.iter_mut().for_each(|tt| match tt { - crate::TokenTree::Leaf(leaf) => match leaf { - crate::Leaf::Literal(it) => it.span = f(it.span), - crate::Leaf::Punct(it) => it.span = 
f(it.span), - crate::Leaf::Ident(it) => it.span = f(it.span), - }, - crate::TokenTree::Subtree(s) => s.visit_ids(f), - }) - } + fn is_dummy(&self) -> bool { + *self == Self::DUMMY } } #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] pub struct SyntaxContext(pub u32); -// #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] -// pub struct Span { -// pub id: TokenId, -// pub ctx: SyntaxContext, -// } -// pub type Span = (TokenId, SyntaxContext); - #[derive(Debug, Clone, PartialEq, Eq, Hash)] -pub enum TokenTree { - Leaf(Leaf), - Subtree(Subtree), +pub enum TokenTree { + Leaf(Leaf), + Subtree(Subtree), +} +impl_from!(Leaf, Subtree for TokenTree); +impl TokenTree { + pub const fn empty() -> Self { + Self::Subtree(Subtree { delimiter: Delimiter::unspecified(), token_trees: vec![] }) + } } -impl_from!(Leaf, Subtree for TokenTree); #[derive(Debug, Clone, PartialEq, Eq, Hash)] -pub enum Leaf { - Literal(Literal), - Punct(Punct), - Ident(Ident), +pub enum Leaf { + Literal(Literal), + Punct(Punct), + Ident(Ident), } -impl Leaf { - pub fn span(&self) -> &Span { +impl Leaf { + pub fn span(&self) -> &S { match self { Leaf::Literal(it) => &it.span, Leaf::Punct(it) => &it.span, @@ -118,21 +92,49 @@ impl Leaf { } } } -impl_from!(Literal, Punct, Ident for Leaf); +impl_from!(Literal, Punct, Ident for Leaf); #[derive(Clone, PartialEq, Eq, Hash)] -pub struct Subtree { - pub delimiter: Delimiter, - pub token_trees: Vec>, +pub struct Subtree { + pub delimiter: Delimiter, + pub token_trees: Vec>, +} + +impl Subtree { + pub const fn empty() -> Self { + Subtree { delimiter: Delimiter::unspecified(), token_trees: vec![] } + } + + pub fn visit_ids(&mut self, f: &mut impl FnMut(S) -> S) { + self.delimiter.open = f(self.delimiter.open); + self.delimiter.close = f(self.delimiter.close); + self.token_trees.iter_mut().for_each(|tt| match tt { + crate::TokenTree::Leaf(leaf) => match leaf { + crate::Leaf::Literal(it) => it.span = f(it.span), + crate::Leaf::Punct(it) => it.span = f(it.span), + crate::Leaf::Ident(it) => it.span = f(it.span), + }, + crate::TokenTree::Subtree(s) => s.visit_ids(f), + }) + } } #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] -pub struct Delimiter { - pub open: Span, - pub close: Span, +pub struct Delimiter { + pub open: S, + pub close: S, pub kind: DelimiterKind, } +impl Delimiter { + pub const UNSPECIFIED: Self = + Self { open: S::DUMMY, close: S::DUMMY, kind: DelimiterKind::Invisible }; + pub const fn unspecified() -> Self { + Self::UNSPECIFIED + } +} + + #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] pub enum DelimiterKind { Parenthesis, @@ -142,16 +144,16 @@ pub enum DelimiterKind { } #[derive(Debug, Clone, PartialEq, Eq, Hash)] -pub struct Literal { +pub struct Literal { pub text: SmolStr, - pub span: Span, + pub span: S, } #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] -pub struct Punct { +pub struct Punct { pub char: char, pub spacing: Spacing, - pub span: Span, + pub span: S, } #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] @@ -162,9 +164,9 @@ pub enum Spacing { #[derive(Debug, Clone, PartialEq, Eq, Hash)] /// Identifier or keyword. Unlike rustc, we keep "r#" prefix when it represents a raw identifier. 
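/// A minimal sketch of that behavior, assuming the generic `Ident<S>` below and
/// the `TokenId` span from this crate:
/// ```ignore
/// let raw = Ident { text: "r#fn".into(), span: TokenId::DUMMY };
/// assert_eq!(raw.to_string(), "r#fn"); // `Display` prints `text` verbatim
/// ```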
-pub struct Ident { +pub struct Ident { pub text: SmolStr, - pub span: Span, + pub span: S, } impl Ident { @@ -173,9 +175,9 @@ impl Ident { } } -fn print_debug_subtree( +fn print_debug_subtree( f: &mut fmt::Formatter<'_>, - subtree: &Subtree, + subtree: &Subtree, level: usize, ) -> fmt::Result { let align = " ".repeat(level); @@ -203,9 +205,9 @@ fn print_debug_subtree( Ok(()) } -fn print_debug_token( +fn print_debug_token( f: &mut fmt::Formatter<'_>, - tkn: &TokenTree, + tkn: &TokenTree, level: usize, ) -> fmt::Result { let align = " ".repeat(level); @@ -231,13 +233,13 @@ fn print_debug_token( Ok(()) } -impl fmt::Debug for Subtree { +impl fmt::Debug for Subtree { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { print_debug_subtree(f, self, 0) } } -impl fmt::Display for TokenTree { +impl fmt::Display for TokenTree { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { TokenTree::Leaf(it) => fmt::Display::fmt(it, f), @@ -246,7 +248,7 @@ impl fmt::Display for TokenTree { } } -impl fmt::Display for Subtree { +impl fmt::Display for Subtree { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let (l, r) = match self.delimiter.kind { DelimiterKind::Parenthesis => ("(", ")"), @@ -274,7 +276,7 @@ impl fmt::Display for Subtree { } } -impl fmt::Display for Leaf { +impl fmt::Display for Leaf { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { Leaf::Ident(it) => fmt::Display::fmt(it, f), @@ -284,25 +286,25 @@ impl fmt::Display for Leaf { } } -impl fmt::Display for Ident { +impl fmt::Display for Ident { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fmt::Display::fmt(&self.text, f) } } -impl fmt::Display for Literal { +impl fmt::Display for Literal { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fmt::Display::fmt(&self.text, f) } } -impl fmt::Display for Punct { +impl fmt::Display for Punct { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fmt::Display::fmt(&self.char, f) } } -impl Subtree { +impl Subtree { /// Count the number of tokens recursively pub fn count(&self) -> usize { let children_count = self @@ -318,7 +320,7 @@ impl Subtree { } } -impl Subtree { +impl Subtree { /// A simple line string used for debugging pub fn as_debug_string(&self) -> String { let delim = match self.delimiter.kind { @@ -366,8 +368,8 @@ impl Subtree { pub mod buffer; -pub fn pretty(tkns: &[TokenTree]) -> String { - fn tokentree_to_text(tkn: &TokenTree) -> String { +pub fn pretty(tkns: &[TokenTree]) -> String { + fn tokentree_to_text(tkn: &TokenTree) -> String { match tkn { TokenTree::Leaf(Leaf::Ident(ident)) => ident.text.clone().into(), TokenTree::Leaf(Leaf::Literal(literal)) => literal.text.clone().into(), From f79439caedee7ed00252aff8d4d4fae8fac0c597 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Thu, 29 Jun 2023 12:23:45 +0200 Subject: [PATCH 03/80] Infect proc-macro-api crate with generic span type parameter --- crates/proc-macro-api/src/lib.rs | 12 +- crates/proc-macro-api/src/msg.rs | 23 ++- crates/proc-macro-api/src/msg/flat.rs | 230 ++++++++++++++++++-------- 3 files changed, 180 insertions(+), 85 deletions(-) diff --git a/crates/proc-macro-api/src/lib.rs b/crates/proc-macro-api/src/lib.rs index 1603458f756ee..7a3580f814e27 100644 --- a/crates/proc-macro-api/src/lib.rs +++ b/crates/proc-macro-api/src/lib.rs @@ -17,10 +17,8 @@ use triomphe::Arc; use serde::{Deserialize, Serialize}; -use ::tt::token_id as tt; - use crate::{ - msg::{ExpandMacro, FlatTree, PanicMessage}, + msg::{flat::SerializableSpan, ExpandMacro, 
FlatTree, PanicMessage}, process::ProcMacroProcessSrv, }; @@ -134,12 +132,12 @@ impl ProcMacro { self.kind } - pub fn expand( + pub fn expand>( &self, - subtree: &tt::Subtree, - attr: Option<&tt::Subtree>, + subtree: &tt::Subtree, + attr: Option<&tt::Subtree>, env: Vec<(String, String)>, - ) -> Result, ServerError> { + ) -> Result, PanicMessage>, ServerError> { let version = self.process.lock().unwrap_or_else(|e| e.into_inner()).version(); let current_dir = env .iter() diff --git a/crates/proc-macro-api/src/msg.rs b/crates/proc-macro-api/src/msg.rs index 4b01643c2a298..f0719777ab4df 100644 --- a/crates/proc-macro-api/src/msg.rs +++ b/crates/proc-macro-api/src/msg.rs @@ -16,8 +16,15 @@ pub use crate::msg::flat::FlatTree; pub const NO_VERSION_CHECK_VERSION: u32 = 0; pub const VERSION_CHECK_VERSION: u32 = 1; pub const ENCODE_CLOSE_SPAN_VERSION: u32 = 2; +/// This version changes how spans are encoded, kind of. Prior to this version, +/// spans were represented as a single u32 which effectively forced spans to be +/// token ids. Starting with this version, the span fields are still u32, +/// but if the size of the span is greater than 1 then the span data is encoded in +/// an additional vector where the span represents the offset into that vector. +/// This allows encoding bigger spans while supporting the previous versions. +pub const VARIABLE_SIZED_SPANS: u32 = 2; -pub const CURRENT_API_VERSION: u32 = ENCODE_CLOSE_SPAN_VERSION; +pub const CURRENT_API_VERSION: u32 = VARIABLE_SIZED_SPANS; #[derive(Debug, Serialize, Deserialize)] pub enum Request { @@ -115,10 +122,14 @@ fn write_json(out: &mut impl Write, msg: &str) -> io::Result<()> { #[cfg(test)] mod tests { + use tt::{ + Delimiter, DelimiterKind, Ident, Leaf, Literal, Punct, Spacing, Span, Subtree, TokenId, + TokenTree, + }; + use super::*; - use crate::tt::*; - fn fixture_token_tree() -> Subtree { + fn fixture_token_tree() -> Subtree { let mut subtree = Subtree { delimiter: Delimiter::unspecified(), token_trees: Vec::new() }; subtree .token_trees @@ -128,17 +139,17 @@ mod tests { .push(TokenTree::Leaf(Ident { text: "Foo".into(), span: TokenId(1) }.into())); subtree.token_trees.push(TokenTree::Leaf(Leaf::Literal(Literal { text: "Foo".into(), - span: TokenId::unspecified(), + span: TokenId::DUMMY, }))); subtree.token_trees.push(TokenTree::Leaf(Leaf::Punct(Punct { char: '@', - span: TokenId::unspecified(), + span: TokenId::DUMMY, spacing: Spacing::Joint, }))); subtree.token_trees.push(TokenTree::Subtree(Subtree { delimiter: Delimiter { open: TokenId(2), - close: TokenId::UNSPECIFIED, + close: TokenId::DUMMY, kind: DelimiterKind::Brace, }, token_trees: vec![], diff --git a/crates/proc-macro-api/src/msg/flat.rs b/crates/proc-macro-api/src/msg/flat.rs index 44245336f0166..fe82b8d045420 100644 --- a/crates/proc-macro-api/src/msg/flat.rs +++ b/crates/proc-macro-api/src/msg/flat.rs @@ -38,11 +38,22 @@ use std::collections::{HashMap, VecDeque}; use serde::{Deserialize, Serialize}; +use tt::Span; -use crate::{ - msg::ENCODE_CLOSE_SPAN_VERSION, - tt::{self, TokenId}, -}; +use crate::msg::{ENCODE_CLOSE_SPAN_VERSION, VARIABLE_SIZED_SPANS}; + +pub trait SerializableSpan: Span { + fn into_u32(self) -> [u32; L]; + fn from_u32(input: [u32; L]) -> Self; +} +impl SerializableSpan<1> for tt::TokenId { + fn into_u32(self) -> [u32; 1] { + [self.0] + } + fn from_u32([input]: [u32; 1]) -> Self { + tt::TokenId(input) + } +} #[derive(Serialize, Deserialize, Debug)] pub struct FlatTree { @@ -52,33 +63,79 @@ pub struct FlatTree { ident: Vec, token_tree: Vec, text: 
Vec, + #[serde(skip_serializing_if = "SpanMap::do_serialize")] + #[serde(default)] + span_map: SpanMap, +} + +#[derive(Serialize, Deserialize, Debug)] +pub struct SpanMap { + #[serde(skip_serializing)] + serialize: bool, + span_size: u32, + spans: Vec, +} + +impl Default for SpanMap { + fn default() -> Self { + Self { serialize: false, span_size: 1, spans: Default::default() } + } } -struct SubtreeRepr { - open: tt::TokenId, - close: tt::TokenId, +impl SpanMap { + fn serialize_span>(&mut self, span: S) -> u32 { + let u32s = span.into_u32(); + if L == 1 { + u32s[0] + } else { + let offset = self.spans.len() as u32; + self.spans.extend(u32s); + offset + } + } + fn deserialize_span>(&self, offset: u32) -> S { + S::from_u32(if L == 1 { + [offset].as_ref().try_into().unwrap() + } else { + self.spans[offset as usize..][..L].try_into().unwrap() + }) + } +} + +impl SpanMap { + pub fn do_serialize(&self) -> bool { + self.serialize + } +} + +struct SubtreeRepr { + open: S, + close: S, kind: tt::DelimiterKind, tt: [u32; 2], } -struct LiteralRepr { - id: tt::TokenId, +struct LiteralRepr { + id: S, text: u32, } -struct PunctRepr { - id: tt::TokenId, +struct PunctRepr { + id: S, char: char, spacing: tt::Spacing, } -struct IdentRepr { - id: tt::TokenId, +struct IdentRepr { + id: S, text: u32, } impl FlatTree { - pub fn new(subtree: &tt::Subtree, version: u32) -> FlatTree { + pub fn new>( + subtree: &tt::Subtree, + version: u32, + ) -> FlatTree { let mut w = Writer { string_table: HashMap::new(), work: VecDeque::new(), @@ -91,60 +148,78 @@ impl FlatTree { text: Vec::new(), }; w.write(subtree); - + assert!(L == 1 || version >= VARIABLE_SIZED_SPANS); + let mut span_map = SpanMap { + serialize: version >= VARIABLE_SIZED_SPANS && L != 1, + span_size: L as u32, + spans: Vec::new(), + }; return FlatTree { subtree: if version >= ENCODE_CLOSE_SPAN_VERSION { - write_vec(w.subtree, SubtreeRepr::write_with_close_span) + write_vec(&mut span_map, w.subtree, SubtreeRepr::write_with_close_span) } else { - write_vec(w.subtree, SubtreeRepr::write) + write_vec(&mut span_map, w.subtree, SubtreeRepr::write) }, - literal: write_vec(w.literal, LiteralRepr::write), - punct: write_vec(w.punct, PunctRepr::write), - ident: write_vec(w.ident, IdentRepr::write), + literal: write_vec(&mut span_map, w.literal, LiteralRepr::write), + punct: write_vec(&mut span_map, w.punct, PunctRepr::write), + ident: write_vec(&mut span_map, w.ident, IdentRepr::write), token_tree: w.token_tree, text: w.text, + span_map, }; - fn write_vec [u32; N], const N: usize>(xs: Vec, f: F) -> Vec { - xs.into_iter().flat_map(f).collect() + fn write_vec [u32; N], const N: usize>( + map: &mut SpanMap, + xs: Vec, + f: F, + ) -> Vec { + xs.into_iter().flat_map(|it| f(it, map)).collect() } } - pub fn to_subtree(self, version: u32) -> tt::Subtree { + pub fn to_subtree>( + self, + version: u32, + ) -> tt::Subtree { + assert!((version >= VARIABLE_SIZED_SPANS || L == 1) && L as u32 == self.span_map.span_size); return Reader { subtree: if version >= ENCODE_CLOSE_SPAN_VERSION { - read_vec(self.subtree, SubtreeRepr::read_with_close_span) + read_vec(&self.span_map, self.subtree, SubtreeRepr::read_with_close_span) } else { - read_vec(self.subtree, SubtreeRepr::read) + read_vec(&self.span_map, self.subtree, SubtreeRepr::read) }, - literal: read_vec(self.literal, LiteralRepr::read), - punct: read_vec(self.punct, PunctRepr::read), - ident: read_vec(self.ident, IdentRepr::read), + literal: read_vec(&self.span_map, self.literal, LiteralRepr::read), + punct: read_vec(&self.span_map, 
self.punct, PunctRepr::read), + ident: read_vec(&self.span_map, self.ident, IdentRepr::read), token_tree: self.token_tree, text: self.text, } .read(); - fn read_vec T, const N: usize>(xs: Vec, f: F) -> Vec { + fn read_vec T, const N: usize>( + map: &SpanMap, + xs: Vec, + f: F, + ) -> Vec { let mut chunks = xs.chunks_exact(N); - let res = chunks.by_ref().map(|chunk| f(chunk.try_into().unwrap())).collect(); + let res = chunks.by_ref().map(|chunk| f(chunk.try_into().unwrap(), map)).collect(); assert!(chunks.remainder().is_empty()); res } } } -impl SubtreeRepr { - fn write(self) -> [u32; 4] { +impl> SubtreeRepr { + fn write(self, map: &mut SpanMap) -> [u32; 4] { let kind = match self.kind { tt::DelimiterKind::Invisible => 0, tt::DelimiterKind::Parenthesis => 1, tt::DelimiterKind::Brace => 2, tt::DelimiterKind::Bracket => 3, }; - [self.open.0, kind, self.tt[0], self.tt[1]] + [map.serialize_span(self.open), kind, self.tt[0], self.tt[1]] } - fn read([open, kind, lo, len]: [u32; 4]) -> SubtreeRepr { + fn read([open, kind, lo, len]: [u32; 4], map: &SpanMap) -> Self { let kind = match kind { 0 => tt::DelimiterKind::Invisible, 1 => tt::DelimiterKind::Parenthesis, @@ -152,18 +227,24 @@ impl SubtreeRepr { 3 => tt::DelimiterKind::Bracket, other => panic!("bad kind {other}"), }; - SubtreeRepr { open: TokenId(open), close: TokenId::UNSPECIFIED, kind, tt: [lo, len] } + SubtreeRepr { open: map.deserialize_span(open), close: S::DUMMY, kind, tt: [lo, len] } } - fn write_with_close_span(self) -> [u32; 5] { + fn write_with_close_span(self, map: &mut SpanMap) -> [u32; 5] { let kind = match self.kind { tt::DelimiterKind::Invisible => 0, tt::DelimiterKind::Parenthesis => 1, tt::DelimiterKind::Brace => 2, tt::DelimiterKind::Bracket => 3, }; - [self.open.0, self.close.0, kind, self.tt[0], self.tt[1]] + [ + map.serialize_span(self.open), + map.serialize_span(self.close), + kind, + self.tt[0], + self.tt[1], + ] } - fn read_with_close_span([open, close, kind, lo, len]: [u32; 5]) -> SubtreeRepr { + fn read_with_close_span([open, close, kind, lo, len]: [u32; 5], map: &SpanMap) -> Self { let kind = match kind { 0 => tt::DelimiterKind::Invisible, 1 => tt::DelimiterKind::Parenthesis, @@ -171,67 +252,72 @@ impl SubtreeRepr { 3 => tt::DelimiterKind::Bracket, other => panic!("bad kind {other}"), }; - SubtreeRepr { open: TokenId(open), close: TokenId(close), kind, tt: [lo, len] } + SubtreeRepr { + open: map.deserialize_span(open), + close: map.deserialize_span(close), + kind, + tt: [lo, len], + } } } -impl LiteralRepr { - fn write(self) -> [u32; 2] { - [self.id.0, self.text] +impl> LiteralRepr { + fn write(self, map: &mut SpanMap) -> [u32; 2] { + [map.serialize_span(self.id), self.text] } - fn read([id, text]: [u32; 2]) -> LiteralRepr { - LiteralRepr { id: TokenId(id), text } + fn read([id, text]: [u32; 2], map: &SpanMap) -> Self { + LiteralRepr { id: map.deserialize_span(id), text } } } -impl PunctRepr { - fn write(self) -> [u32; 3] { +impl> PunctRepr { + fn write(self, map: &mut SpanMap) -> [u32; 3] { let spacing = match self.spacing { tt::Spacing::Alone => 0, tt::Spacing::Joint => 1, }; - [self.id.0, self.char as u32, spacing] + [map.serialize_span(self.id), self.char as u32, spacing] } - fn read([id, char, spacing]: [u32; 3]) -> PunctRepr { + fn read([id, char, spacing]: [u32; 3], map: &SpanMap) -> Self { let spacing = match spacing { 0 => tt::Spacing::Alone, 1 => tt::Spacing::Joint, other => panic!("bad spacing {other}"), }; - PunctRepr { id: TokenId(id), char: char.try_into().unwrap(), spacing } + PunctRepr { id: 
map.deserialize_span(id), char: char.try_into().unwrap(), spacing } } } -impl IdentRepr { - fn write(self) -> [u32; 2] { - [self.id.0, self.text] +impl> IdentRepr { + fn write(self, map: &mut SpanMap) -> [u32; 2] { + [map.serialize_span(self.id), self.text] } - fn read(data: [u32; 2]) -> IdentRepr { - IdentRepr { id: TokenId(data[0]), text: data[1] } + fn read(data: [u32; 2], map: &SpanMap) -> Self { + IdentRepr { id: map.deserialize_span(data[0]), text: data[1] } } } -struct Writer<'a> { - work: VecDeque<(usize, &'a tt::Subtree)>, +struct Writer<'a, const L: usize, S> { + work: VecDeque<(usize, &'a tt::Subtree)>, string_table: HashMap<&'a str, u32>, - subtree: Vec, - literal: Vec, - punct: Vec, - ident: Vec, + subtree: Vec>, + literal: Vec>, + punct: Vec>, + ident: Vec>, token_tree: Vec, text: Vec, } -impl<'a> Writer<'a> { - fn write(&mut self, root: &'a tt::Subtree) { +impl<'a, const L: usize, S: Copy> Writer<'a, L, S> { + fn write(&mut self, root: &'a tt::Subtree) { self.enqueue(root); while let Some((idx, subtree)) = self.work.pop_front() { self.subtree(idx, subtree); } } - fn subtree(&mut self, idx: usize, subtree: &'a tt::Subtree) { + fn subtree(&mut self, idx: usize, subtree: &'a tt::Subtree) { let mut first_tt = self.token_tree.len(); let n_tt = subtree.token_trees.len(); self.token_tree.resize(first_tt + n_tt, !0); @@ -273,7 +359,7 @@ impl<'a> Writer<'a> { } } - fn enqueue(&mut self, subtree: &'a tt::Subtree) -> u32 { + fn enqueue(&mut self, subtree: &'a tt::Subtree) -> u32 { let idx = self.subtree.len(); let open = subtree.delimiter.open; let close = subtree.delimiter.close; @@ -293,18 +379,18 @@ impl<'a> Writer<'a> { } } -struct Reader { - subtree: Vec, - literal: Vec, - punct: Vec, - ident: Vec, +struct Reader { + subtree: Vec>, + literal: Vec>, + punct: Vec>, + ident: Vec>, token_tree: Vec, text: Vec, } -impl Reader { - pub(crate) fn read(self) -> tt::Subtree { - let mut res: Vec> = vec![None; self.subtree.len()]; +impl> Reader { + pub(crate) fn read(self) -> tt::Subtree { + let mut res: Vec>> = vec![None; self.subtree.len()]; for i in (0..self.subtree.len()).rev() { let repr = &self.subtree[i]; let token_trees = &self.token_tree[repr.tt[0] as usize..repr.tt[1] as usize]; From 890eb17b4e1659b22f8113ff99506a2b0eefe5ff Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Fri, 29 Sep 2023 12:37:57 +0200 Subject: [PATCH 04/80] Replace ID based TokenMap with proper relative text-ranges / spans --- Cargo.lock | 2 + crates/base-db/src/fixture.rs | 33 +- crates/base-db/src/input.rs | 9 +- crates/base-db/src/lib.rs | 1 + crates/base-db/src/span.rs | 166 +++++ crates/cfg/src/tests.rs | 39 +- crates/hir-def/src/attr.rs | 89 ++- crates/hir-def/src/attr/tests.rs | 11 +- crates/hir-def/src/data.rs | 2 +- crates/hir-def/src/expander.rs | 17 +- crates/hir-def/src/generics.rs | 18 +- crates/hir-def/src/item_tree.rs | 7 +- crates/hir-def/src/item_tree/lower.rs | 210 ++++-- crates/hir-def/src/lib.rs | 18 +- .../hir-def/src/macro_expansion_tests/mbe.rs | 69 +- .../hir-def/src/macro_expansion_tests/mod.rs | 97 +-- crates/hir-def/src/nameres/collector.rs | 9 +- crates/hir-def/src/nameres/mod_resolution.rs | 2 +- .../hir-def/src/nameres/tests/incremental.rs | 28 +- crates/hir-expand/src/ast_id_map.rs | 18 +- crates/hir-expand/src/attrs.rs | 45 +- crates/hir-expand/src/builtin_attr_macro.rs | 6 +- crates/hir-expand/src/builtin_derive_macro.rs | 147 ++-- crates/hir-expand/src/builtin_fn_macro.rs | 39 +- crates/hir-expand/src/db.rs | 509 ++++++------- crates/hir-expand/src/eager.rs | 171 +++-- 
crates/hir-expand/src/hygiene.rs | 132 ++-- crates/hir-expand/src/lib.rs | 395 ++++------ crates/hir-expand/src/quote.rs | 39 +- crates/hir-ty/src/diagnostics/decl_check.rs | 2 +- crates/hir-ty/src/mir/eval.rs | 2 +- crates/hir/src/db.rs | 4 +- crates/hir/src/lib.rs | 2 +- crates/hir/src/semantics.rs | 21 +- crates/hir/src/semantics/source_to_def.rs | 2 +- crates/hir/src/source_analyzer.rs | 2 +- .../src/handlers/extract_module.rs | 2 +- .../src/handlers/fix_visibility.rs | 4 +- .../src/handlers/generate_constant.rs | 2 +- .../src/handlers/generate_enum_variant.rs | 2 +- .../src/handlers/generate_function.rs | 5 +- .../src/handlers/remove_unused_imports.rs | 2 +- .../replace_derive_with_manual_impl.rs | 2 +- crates/ide-completion/src/completions.rs | 2 +- crates/ide-completion/src/completions/mod_.rs | 2 +- crates/ide-db/src/apply_change.rs | 2 +- crates/ide-db/src/lib.rs | 2 +- crates/ide-db/src/rename.rs | 2 +- crates/ide-db/src/search.rs | 3 +- .../ide-db/src/test_data/test_doc_alias.txt | 28 +- .../test_symbol_index_collection.txt | 118 +-- .../src/handlers/missing_fields.rs | 2 +- .../src/handlers/missing_unsafe.rs | 1 + .../src/handlers/no_such_field.rs | 2 +- .../replace_filter_map_next_with_find_map.rs | 2 +- .../src/handlers/type_mismatch.rs | 2 +- .../src/handlers/unresolved_module.rs | 2 +- crates/ide/src/call_hierarchy.rs | 4 +- crates/ide/src/expand_macro.rs | 2 +- crates/ide/src/goto_definition.rs | 8 +- crates/ide/src/rename.rs | 2 +- crates/ide/src/runnables.rs | 2 +- crates/ide/src/static_index.rs | 2 +- .../ide/src/syntax_highlighting/highlight.rs | 2 +- crates/load-cargo/src/lib.rs | 38 +- crates/mbe/src/benchmark.rs | 67 +- crates/mbe/src/lib.rs | 126 +--- crates/mbe/src/syntax_bridge.rs | 701 ++++++------------ crates/mbe/src/syntax_bridge/tests.rs | 24 +- crates/mbe/src/token_map.rs | 208 +++--- crates/proc-macro-api/Cargo.toml | 1 + crates/proc-macro-api/src/msg/flat.rs | 14 + crates/rust-analyzer/src/cargo_target_spec.rs | 11 +- .../rust-analyzer/src/cli/analysis_stats.rs | 2 +- crates/rust-analyzer/src/cli/diagnostics.rs | 2 +- crates/syntax/src/lib.rs | 46 +- crates/syntax/src/parsing/reparsing.rs | 12 +- crates/syntax/src/tests.rs | 2 +- crates/tt/Cargo.toml | 1 + crates/tt/src/lib.rs | 39 +- 80 files changed, 1819 insertions(+), 2049 deletions(-) create mode 100644 crates/base-db/src/span.rs diff --git a/Cargo.lock b/Cargo.lock index 5a8d971c3d4f8..90ee0810fa6aa 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1263,6 +1263,7 @@ dependencies = [ "serde_json", "snap", "stdx", + "text-size", "tracing", "triomphe", "tt", @@ -2010,6 +2011,7 @@ version = "0.0.0" dependencies = [ "smol_str", "stdx", + "text-size", ] [[package]] diff --git a/crates/base-db/src/fixture.rs b/crates/base-db/src/fixture.rs index 3da555a47acb2..7236b56f6d47d 100644 --- a/crates/base-db/src/fixture.rs +++ b/crates/base-db/src/fixture.rs @@ -8,11 +8,12 @@ use test_utils::{ ESCAPED_CURSOR_MARKER, }; use triomphe::Arc; -use tt::token_id::{Leaf, Subtree, TokenTree}; +use tt::{Leaf, Subtree, TokenTree}; use vfs::{file_set::FileSet, VfsPath}; use crate::{ input::{CrateName, CrateOrigin, LangCrateOrigin}, + span::SpanData, Change, CrateDisplayName, CrateGraph, CrateId, Dependency, DependencyKind, Edition, Env, FileId, FilePosition, FileRange, ProcMacro, ProcMacroExpander, ProcMacroExpansionError, ProcMacros, ReleaseChannel, SourceDatabaseExt, SourceRoot, SourceRootId, @@ -539,10 +540,10 @@ struct IdentityProcMacroExpander; impl ProcMacroExpander for IdentityProcMacroExpander { fn expand( &self, - 
subtree: &Subtree, - _: Option<&Subtree>, + subtree: &Subtree, + _: Option<&Subtree>, _: &Env, - ) -> Result { + ) -> Result, ProcMacroExpansionError> { Ok(subtree.clone()) } } @@ -553,10 +554,10 @@ struct AttributeInputReplaceProcMacroExpander; impl ProcMacroExpander for AttributeInputReplaceProcMacroExpander { fn expand( &self, - _: &Subtree, - attrs: Option<&Subtree>, + _: &Subtree, + attrs: Option<&Subtree>, _: &Env, - ) -> Result { + ) -> Result, ProcMacroExpansionError> { attrs .cloned() .ok_or_else(|| ProcMacroExpansionError::Panic("Expected attribute input".into())) @@ -568,11 +569,11 @@ struct MirrorProcMacroExpander; impl ProcMacroExpander for MirrorProcMacroExpander { fn expand( &self, - input: &Subtree, - _: Option<&Subtree>, + input: &Subtree, + _: Option<&Subtree>, _: &Env, - ) -> Result { - fn traverse(input: &Subtree) -> Subtree { + ) -> Result, ProcMacroExpansionError> { + fn traverse(input: &Subtree) -> Subtree { let mut token_trees = vec![]; for tt in input.token_trees.iter().rev() { let tt = match tt { @@ -595,13 +596,13 @@ struct ShortenProcMacroExpander; impl ProcMacroExpander for ShortenProcMacroExpander { fn expand( &self, - input: &Subtree, - _: Option<&Subtree>, + input: &Subtree, + _: Option<&Subtree>, _: &Env, - ) -> Result { + ) -> Result, ProcMacroExpansionError> { return Ok(traverse(input)); - fn traverse(input: &Subtree) -> Subtree { + fn traverse(input: &Subtree) -> Subtree { let token_trees = input .token_trees .iter() @@ -613,7 +614,7 @@ impl ProcMacroExpander for ShortenProcMacroExpander { Subtree { delimiter: input.delimiter, token_trees } } - fn modify_leaf(leaf: &Leaf) -> Leaf { + fn modify_leaf(leaf: &Leaf) -> Leaf { let mut leaf = leaf.clone(); match &mut leaf { Leaf::Literal(it) => { diff --git a/crates/base-db/src/input.rs b/crates/base-db/src/input.rs index e4f78321e215a..2fa5c25c91e5d 100644 --- a/crates/base-db/src/input.rs +++ b/crates/base-db/src/input.rs @@ -13,9 +13,10 @@ use la_arena::{Arena, Idx}; use rustc_hash::{FxHashMap, FxHashSet}; use syntax::SmolStr; use triomphe::Arc; -use tt::token_id::Subtree; use vfs::{file_set::FileSet, AbsPathBuf, AnchoredPath, FileId, VfsPath}; +use crate::span::SpanData; + // Map from crate id to the name of the crate and path of the proc-macro. If the value is `None`, // then the crate for the proc-macro hasn't been build yet as the build data is missing. 
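// ---------------------------------------------------------------------------
// Editorial sketch, not part of the patch: the `VARIABLE_SIZED_SPANS` wire
// format documented in the proc-macro-api patch above, in isolation. A span
// that serializes to more than one u32 is appended to a side table and the
// token's span slot stores the offset into that table; one-u32 spans stay
// inline, which is what keeps the older token-id encoding readable.
// `WideSpan` is invented for illustration.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
struct WideSpan {
    start: u32,
    end: u32,
}

fn encode(table: &mut Vec<u32>, span: WideSpan) -> u32 {
    let offset = table.len() as u32;
    table.extend([span.start, span.end]);
    offset // stored in the flat token stream instead of the span itself
}

fn decode(table: &[u32], offset: u32) -> WideSpan {
    let [start, end]: [u32; 2] = table[offset as usize..][..2].try_into().unwrap();
    WideSpan { start, end }
}
// ---------------------------------------------------------------------------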
pub type ProcMacroPaths = FxHashMap<CrateId, Result<(Option<String>, AbsPathBuf), String>>;

@@ -255,10 +256,10 @@ pub enum ProcMacroKind
 pub trait ProcMacroExpander: fmt::Debug + Send + Sync + RefUnwindSafe {
     fn expand(
         &self,
-        subtree: &Subtree,
-        attrs: Option<&Subtree>,
+        subtree: &tt::Subtree<SpanData>,
+        attrs: Option<&tt::Subtree<SpanData>>,
         env: &Env,
-    ) -> Result<Subtree, ProcMacroExpansionError>;
+    ) -> Result<tt::Subtree<SpanData>, ProcMacroExpansionError>;
 }

 #[derive(Debug)]
diff --git a/crates/base-db/src/lib.rs b/crates/base-db/src/lib.rs
index 40cfab88afdbe..6dc1629c3be9b 100644
--- a/crates/base-db/src/lib.rs
+++ b/crates/base-db/src/lib.rs
@@ -5,6 +5,7 @@ mod input;
 mod change;

 pub mod fixture;
+pub mod span;

 use std::panic;

diff --git a/crates/base-db/src/span.rs b/crates/base-db/src/span.rs
new file mode 100644
index 0000000000000..1072d937a3bd2
--- /dev/null
+++ b/crates/base-db/src/span.rs
@@ -0,0 +1,166 @@
+use std::fmt;
+
+use salsa::InternId;
+use vfs::FileId;
+
+pub type ErasedFileAstId = la_arena::Idx<syntax::SyntaxNodePtr>;
+
+// The first index is always the root node's AstId
+pub const ROOT_ERASED_FILE_AST_ID: ErasedFileAstId =
+    la_arena::Idx::from_raw(la_arena::RawIdx::from_u32(0));
+
+#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
+pub struct SyntaxContext;
+
+pub type SpanData = tt::SpanData<SpanAnchor>;
+
+#[derive(Copy, Clone, PartialEq, Eq, Hash)]
+pub struct SpanAnchor {
+    pub file_id: HirFileId,
+    pub ast_id: ErasedFileAstId,
+}
+
+impl fmt::Debug for SpanAnchor {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_tuple("SpanAnchor").field(&self.file_id).field(&self.ast_id.into_raw()).finish()
+    }
+}
+
+impl tt::Span for SpanAnchor {
+    const DUMMY: Self = SpanAnchor { file_id: HirFileId(0), ast_id: ROOT_ERASED_FILE_AST_ID };
+}
+
+/// Input to the analyzer is a set of files, where each file is identified by
+/// `FileId` and contains source code. However, another source of source code in
+/// Rust are macros: each macro can be thought of as producing a "temporary
+/// file". To assign an id to such a file, we use the id of the macro call that
+/// produced the file. So, a `HirFileId` is either a `FileId` (source code
+/// written by user), or a `MacroCallId` (source code produced by macro).
+///
+/// What is a `MacroCallId`? Simplifying, it's a `HirFileId` of a file
+/// containing the call plus the offset of the macro call in the file. Note that
+/// this is a recursive definition! However, the size_of of `HirFileId` is
+/// finite (because everything bottoms out at the real `FileId`) and small
+/// (`MacroCallId` uses the location interning. You can check details here:
+/// <https://en.wikipedia.org/wiki/String_interning>).
+///
+/// The two variants are encoded in a single u32 which are differentiated by the MSB.
+/// If the MSB is 0, the value represents a `FileId`, otherwise the remaining 31 bits represent a
+/// `MacroCallId`.
+#[derive(Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)]
+pub struct HirFileId(u32);
+
+impl From<HirFileId> for u32 {
+    fn from(value: HirFileId) -> Self {
+        value.0
+    }
+}
+
+impl From<u32> for HirFileId {
+    fn from(value: u32) -> Self {
+        HirFileId(value)
+    }
+}
+
+impl From<MacroCallId> for HirFileId {
+    fn from(value: MacroCallId) -> Self {
+        value.as_file()
+    }
+}
+
+impl fmt::Debug for HirFileId {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        self.repr().fmt(f)
+    }
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct MacroFile {
+    pub macro_call_id: MacroCallId,
+}
+
+/// `MacroCallId` identifies a particular macro invocation, like
+/// `println!("Hello, {}", world)`.
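// ---------------------------------------------------------------------------
// Editorial sketch, not part of the patch: the MSB tagging the `HirFileId`
// docs above describe, reduced to plain u32 arithmetic. `TAG` mirrors the
// `MACRO_FILE_TAG_MASK` constant in the impl that follows.
const TAG: u32 = 1 << 31;

fn encode_file_id(file_id: u32) -> u32 {
    assert!(file_id & TAG == 0); // a FileId must leave the MSB free
    file_id
}

fn encode_macro_call_id(macro_call_id: u32) -> u32 {
    assert!(macro_call_id & TAG == 0);
    macro_call_id | TAG // MSB set marks the remaining 31 bits as a MacroCallId
}

fn is_macro(repr: u32) -> bool {
    repr & TAG != 0
}
// ---------------------------------------------------------------------------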
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub struct MacroCallId(salsa::InternId); +crate::impl_intern_key!(MacroCallId); + +impl MacroCallId { + pub fn as_file(self) -> HirFileId { + MacroFile { macro_call_id: self }.into() + } + + pub fn as_macro_file(self) -> MacroFile { + MacroFile { macro_call_id: self } + } +} + +#[derive(Clone, Copy, PartialEq, Eq, Hash)] +pub enum HirFileIdRepr { + FileId(FileId), + MacroFile(MacroFile), +} + +impl fmt::Debug for HirFileIdRepr { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + Self::FileId(arg0) => f.debug_tuple("FileId").field(&arg0.0).finish(), + Self::MacroFile(arg0) => { + f.debug_tuple("MacroFile").field(&arg0.macro_call_id.0).finish() + } + } + } +} + +impl From for HirFileId { + fn from(FileId(id): FileId) -> Self { + assert!(id < Self::MAX_FILE_ID); + HirFileId(id) + } +} + +impl From for HirFileId { + fn from(MacroFile { macro_call_id: MacroCallId(id) }: MacroFile) -> Self { + let id = id.as_u32(); + assert!(id < Self::MAX_FILE_ID); + HirFileId(id | Self::MACRO_FILE_TAG_MASK) + } +} + +impl HirFileId { + const MAX_FILE_ID: u32 = u32::MAX ^ Self::MACRO_FILE_TAG_MASK; + const MACRO_FILE_TAG_MASK: u32 = 1 << 31; + + #[inline] + pub fn is_macro(self) -> bool { + self.0 & Self::MACRO_FILE_TAG_MASK != 0 + } + + #[inline] + pub fn macro_file(self) -> Option { + match self.0 & Self::MACRO_FILE_TAG_MASK { + 0 => None, + _ => Some(MacroFile { + macro_call_id: MacroCallId(InternId::from(self.0 ^ Self::MACRO_FILE_TAG_MASK)), + }), + } + } + + #[inline] + pub fn file_id(self) -> Option { + match self.0 & Self::MACRO_FILE_TAG_MASK { + 0 => Some(FileId(self.0)), + _ => None, + } + } + + #[inline] + pub fn repr(self) -> HirFileIdRepr { + match self.0 & Self::MACRO_FILE_TAG_MASK { + 0 => HirFileIdRepr::FileId(FileId(self.0)), + _ => HirFileIdRepr::MacroFile(MacroFile { + macro_call_id: MacroCallId(InternId::from(self.0 ^ Self::MACRO_FILE_TAG_MASK)), + }), + } + } +} diff --git a/crates/cfg/src/tests.rs b/crates/cfg/src/tests.rs index bdc3f854e0866..242929c006a5b 100644 --- a/crates/cfg/src/tests.rs +++ b/crates/cfg/src/tests.rs @@ -2,36 +2,37 @@ use arbitrary::{Arbitrary, Unstructured}; use expect_test::{expect, Expect}; use mbe::syntax_node_to_token_tree; use syntax::{ast, AstNode}; +use tt::Span; use crate::{CfgAtom, CfgExpr, CfgOptions, DnfExpr}; +#[derive(Debug, Copy, Clone, PartialEq, Eq)] +struct DummyFile; +impl Span for DummyFile { + const DUMMY: Self = DummyFile; +} + fn assert_parse_result(input: &str, expected: CfgExpr) { - let (tt, _) = { - let source_file = ast::SourceFile::parse(input).ok().unwrap(); - let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap(); - syntax_node_to_token_tree(tt.syntax()) - }; + let source_file = ast::SourceFile::parse(input).ok().unwrap(); + let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap(); + let tt = syntax_node_to_token_tree(tt.syntax(), DummyFile, 0.into(), &Default::default()); let cfg = CfgExpr::parse(&tt); assert_eq!(cfg, expected); } fn check_dnf(input: &str, expect: Expect) { - let (tt, _) = { - let source_file = ast::SourceFile::parse(input).ok().unwrap(); - let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap(); - syntax_node_to_token_tree(tt.syntax()) - }; + let source_file = ast::SourceFile::parse(input).ok().unwrap(); + let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap(); + let tt = syntax_node_to_token_tree(tt.syntax(), DummyFile, 0.into(), 
&Default::default()); let cfg = CfgExpr::parse(&tt); let actual = format!("#![cfg({})]", DnfExpr::new(cfg)); expect.assert_eq(&actual); } fn check_why_inactive(input: &str, opts: &CfgOptions, expect: Expect) { - let (tt, _) = { - let source_file = ast::SourceFile::parse(input).ok().unwrap(); - let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap(); - syntax_node_to_token_tree(tt.syntax()) - }; + let source_file = ast::SourceFile::parse(input).ok().unwrap(); + let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap(); + let tt = syntax_node_to_token_tree(tt.syntax(), DummyFile, 0.into(), &Default::default()); let cfg = CfgExpr::parse(&tt); let dnf = DnfExpr::new(cfg); let why_inactive = dnf.why_inactive(opts).unwrap().to_string(); @@ -40,11 +41,9 @@ fn check_why_inactive(input: &str, opts: &CfgOptions, expect: Expect) { #[track_caller] fn check_enable_hints(input: &str, opts: &CfgOptions, expected_hints: &[&str]) { - let (tt, _) = { - let source_file = ast::SourceFile::parse(input).ok().unwrap(); - let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap(); - syntax_node_to_token_tree(tt.syntax()) - }; + let source_file = ast::SourceFile::parse(input).ok().unwrap(); + let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap(); + let tt = syntax_node_to_token_tree(tt.syntax(), DummyFile, 0.into(), &Default::default()); let cfg = CfgExpr::parse(&tt); let dnf = DnfExpr::new(cfg); let hints = dnf.compute_enable_hints(opts).map(|diff| diff.to_string()).collect::>(); diff --git a/crates/hir-def/src/attr.rs b/crates/hir-def/src/attr.rs index fa3025e0303d1..35c9d63979a06 100644 --- a/crates/hir-def/src/attr.rs +++ b/crates/hir-def/src/attr.rs @@ -7,7 +7,10 @@ mod tests; use std::{hash::Hash, ops, slice::Iter as SliceIter}; -use base_db::CrateId; +use base_db::{ + span::{ErasedFileAstId, SpanAnchor}, + CrateId, +}; use cfg::{CfgExpr, CfgOptions}; use either::Either; use hir_expand::{ @@ -28,8 +31,8 @@ use crate::{ lang_item::LangItem, nameres::{ModuleOrigin, ModuleSource}, src::{HasChildSource, HasSource}, - AdtId, AssocItemLoc, AttrDefId, EnumId, GenericParamId, ItemLoc, LocalEnumVariantId, - LocalFieldId, Lookup, MacroId, VariantId, + AdtId, AssocItemLoc, AttrDefId, EnumId, GenericDefId, GenericParamId, ItemLoc, + LocalEnumVariantId, LocalFieldId, Lookup, MacroId, VariantId, }; #[derive(Default, Debug, Clone, PartialEq, Eq)] @@ -415,26 +418,48 @@ impl AttrsWithOwner { AttrDefId::StaticId(it) => attrs_from_item_tree_assoc(db, it), AttrDefId::FunctionId(it) => attrs_from_item_tree_assoc(db, it), AttrDefId::TypeAliasId(it) => attrs_from_item_tree_assoc(db, it), - AttrDefId::GenericParamId(it) => match it { - GenericParamId::ConstParamId(it) => { - let src = it.parent().child_source(db); - RawAttrs::from_attrs_owner( - db.upcast(), - src.with_value(&src.value[it.local_id()]), - ) - } - GenericParamId::TypeParamId(it) => { - let src = it.parent().child_source(db); - RawAttrs::from_attrs_owner( - db.upcast(), - src.with_value(&src.value[it.local_id()]), - ) - } - GenericParamId::LifetimeParamId(it) => { - let src = it.parent.child_source(db); - RawAttrs::from_attrs_owner(db.upcast(), src.with_value(&src.value[it.local_id])) + AttrDefId::GenericParamId(it) => { + let ast_id = |p| match p { + GenericDefId::AdtId(AdtId::StructId(it)) => { + erased_ast_id_from_item_tree(db, it) + } + GenericDefId::AdtId(AdtId::EnumId(it)) => erased_ast_id_from_item_tree(db, it), + GenericDefId::AdtId(AdtId::UnionId(it)) 
=> erased_ast_id_from_item_tree(db, it), + GenericDefId::TraitId(it) => erased_ast_id_from_item_tree(db, it), + GenericDefId::TraitAliasId(it) => erased_ast_id_from_item_tree(db, it), + GenericDefId::ImplId(it) => erased_ast_id_from_item_tree(db, it), + GenericDefId::EnumVariantId(it) => erased_ast_id_from_item_tree(db, it.parent), + GenericDefId::TypeAliasId(it) => erased_ast_id_from_item_tree_assoc(db, it), + GenericDefId::FunctionId(it) => erased_ast_id_from_item_tree_assoc(db, it), + GenericDefId::ConstId(it) => erased_ast_id_from_item_tree_assoc(db, it), + }; + match it { + GenericParamId::ConstParamId(it) => { + let src = it.parent().child_source(db); + RawAttrs::from_attrs_owner( + db.upcast(), + SpanAnchor { file_id: src.file_id, ast_id: ast_id(it.parent()) }, + src.with_value(&src.value[it.local_id()]), + ) + } + GenericParamId::TypeParamId(it) => { + let src = it.parent().child_source(db); + RawAttrs::from_attrs_owner( + db.upcast(), + SpanAnchor { file_id: src.file_id, ast_id: ast_id(it.parent()) }, + src.with_value(&src.value[it.local_id()]), + ) + } + GenericParamId::LifetimeParamId(it) => { + let src = it.parent.child_source(db); + RawAttrs::from_attrs_owner( + db.upcast(), + SpanAnchor { file_id: src.file_id, ast_id: ast_id(it.parent) }, + src.with_value(&src.value[it.local_id]), + ) + } } - }, + } AttrDefId::ExternBlockId(it) => attrs_from_item_tree_loc(db, it), AttrDefId::ExternCrateId(it) => attrs_from_item_tree_loc(db, it), AttrDefId::UseId(it) => attrs_from_item_tree_loc(db, it), @@ -638,6 +663,26 @@ fn any_has_attrs( id.lookup(db).source(db).map(ast::AnyHasAttrs::new) } +fn erased_ast_id_from_item_tree( + db: &dyn DefDatabase, + lookup: impl Lookup>, +) -> ErasedFileAstId { + let id = lookup.lookup(db).id; + let tree = id.item_tree(db); + let mod_item = N::id_to_mod_item(id.value); + mod_item.ast_id(&tree).erase() +} + +fn erased_ast_id_from_item_tree_assoc( + db: &dyn DefDatabase, + lookup: impl Lookup>, +) -> ErasedFileAstId { + let id = lookup.lookup(db).id; + let tree = id.item_tree(db); + let mod_item = N::id_to_mod_item(id.value); + mod_item.ast_id(&tree).erase() +} + fn attrs_from_item_tree(db: &dyn DefDatabase, id: ItemTreeId) -> RawAttrs { let tree = id.item_tree(db); let mod_item = N::id_to_mod_item(id.value); diff --git a/crates/hir-def/src/attr/tests.rs b/crates/hir-def/src/attr/tests.rs index e4c8d446af7bd..ad101e9bdf776 100644 --- a/crates/hir-def/src/attr/tests.rs +++ b/crates/hir-def/src/attr/tests.rs @@ -1,17 +1,18 @@ //! This module contains tests for doc-expression parsing. //! Currently, it tests `#[doc(hidden)]` and `#[doc(alias)]`. 
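// ---------------------------------------------------------------------------
// Editorial sketch, not part of the patch: the calling convention the updated
// tests below all share. `syntax_node_to_token_tree` no longer returns a
// `(tree, TokenMap)` pair; the caller passes the anchor that emitted spans are
// made relative to, the node's offset within that anchor, and a hygiene map
// (empty in tests, hence `&Default::default()`). The `parse_first_tt` helper
// name and its exact return type are assumptions.
fn parse_first_tt(input: &str) -> tt::Subtree<base_db::span::SpanData> {
    use syntax::{ast, AstNode};
    use tt::Span;

    let source_file = ast::SourceFile::parse(input).ok().unwrap();
    let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
    mbe::syntax_node_to_token_tree(
        tt.syntax(),
        base_db::span::SpanAnchor::DUMMY,
        0.into(),
        &Default::default(),
    )
}
// ---------------------------------------------------------------------------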
+use base_db::span::SpanAnchor;
 use mbe::syntax_node_to_token_tree;
 use syntax::{ast, AstNode};
+use tt::Span;

 use crate::attr::{DocAtom, DocExpr};

 fn assert_parse_result(input: &str, expected: DocExpr) {
-    let (tt, _) = {
-        let source_file = ast::SourceFile::parse(input).ok().unwrap();
-        let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
-        syntax_node_to_token_tree(tt.syntax())
-    };
+    let source_file = ast::SourceFile::parse(input).ok().unwrap();
+    let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
+    let tt =
+        syntax_node_to_token_tree(tt.syntax(), SpanAnchor::DUMMY, 0.into(), &Default::default());
     let cfg = DocExpr::parse(&tt);
     assert_eq!(cfg, expected);
 }
diff --git a/crates/hir-def/src/data.rs b/crates/hir-def/src/data.rs
index 72ccc17486f0d..4083d497917ce 100644
--- a/crates/hir-def/src/data.rs
+++ b/crates/hir-def/src/data.rs
@@ -663,7 +663,7 @@ impl<'a> AssocItemCollector<'a> {
                     self.module_id.local_id,
                     MacroCallKind::Attr {
                         ast_id,
-                        attr_args: Arc::new((tt::Subtree::empty(), Default::default())),
+                        attr_args: Arc::new(tt::Subtree::empty()),
                         invoc_attr_index: attr.id,
                     },
                     attr.path().clone(),
diff --git a/crates/hir-def/src/expander.rs b/crates/hir-def/src/expander.rs
index 6db8398bc9867..b7cffb57625ee 100644
--- a/crates/hir-def/src/expander.rs
+++ b/crates/hir-def/src/expander.rs
@@ -1,6 +1,9 @@
 //! Macro expansion utilities.

-use base_db::CrateId;
+use base_db::{
+    span::{SpanAnchor, ROOT_ERASED_FILE_AST_ID},
+    CrateId,
+};
 use cfg::CfgOptions;
 use drop_bomb::DropBomb;
 use hir_expand::{
@@ -118,7 +121,17 @@ impl Expander {
     }

     pub(crate) fn parse_attrs(&self, db: &dyn DefDatabase, owner: &dyn ast::HasAttrs) -> Attrs {
-        Attrs::filter(db, self.krate, RawAttrs::new(db.upcast(), owner, &self.hygiene))
+        Attrs::filter(
+            db,
+            self.krate,
+            RawAttrs::new(
+                db.upcast(),
+                // Using `ROOT_ERASED_FILE_AST_ID` here is fine as this is only used for cfg checking
+                SpanAnchor { file_id: self.current_file_id, ast_id: ROOT_ERASED_FILE_AST_ID },
+                owner,
+                &self.hygiene,
+            ),
+        )
     }

     pub(crate) fn cfg_options(&self) -> &CfgOptions {
diff --git a/crates/hir-def/src/generics.rs b/crates/hir-def/src/generics.rs
index fac90e6630452..6d656bf9482f1 100644
--- a/crates/hir-def/src/generics.rs
+++ b/crates/hir-def/src/generics.rs
@@ -21,7 +21,7 @@ use crate::{
     db::DefDatabase,
     dyn_map::{keys, DynMap},
     expander::Expander,
-    item_tree::{AttrOwner, ItemTree},
+    item_tree::ItemTree,
     lower::LowerCtx,
     nameres::{DefMap, MacroSubNs},
     src::{HasChildSource, HasSource},
@@ -250,7 +250,10 @@ impl GenericParams {
         &mut self,
         lower_ctx: &LowerCtx<'_>,
         node: &dyn HasGenericParams,
-        add_param_attrs: impl FnMut(AttrOwner, ast::GenericParam),
+        add_param_attrs: impl FnMut(
+            Either<Idx<TypeOrConstParamData>, Idx<LifetimeParamData>>,
+            ast::GenericParam,
+        ),
     ) {
         if let Some(params) = node.generic_param_list() {
             self.fill_params(lower_ctx, params, add_param_attrs)
@@ -275,7 +278,10 @@ impl GenericParams {
         &mut self,
         lower_ctx: &LowerCtx<'_>,
         params: ast::GenericParamList,
-        mut add_param_attrs: impl FnMut(AttrOwner, ast::GenericParam),
+        mut add_param_attrs: impl FnMut(
+            Either<Idx<TypeOrConstParamData>, Idx<LifetimeParamData>>,
+            ast::GenericParam,
+        ),
     ) {
         for type_or_const_param in params.type_or_const_params() {
             match type_or_const_param {
@@ -297,7 +303,7 @@ impl GenericParams {
                         type_param.type_bound_list(),
                         Either::Left(type_ref),
                     );
-                    add_param_attrs(idx.into(), ast::GenericParam::TypeParam(type_param));
+                    add_param_attrs(Either::Left(idx), ast::GenericParam::TypeParam(type_param));
                 }
                 ast::TypeOrConstParam::Const(const_param) => {
                    let
name = const_param.name().map_or_else(Name::missing, |it| it.as_name()); @@ -310,7 +316,7 @@ impl GenericParams { default: ConstRef::from_const_param(lower_ctx, &const_param), }; let idx = self.type_or_consts.alloc(param.into()); - add_param_attrs(idx.into(), ast::GenericParam::ConstParam(const_param)); + add_param_attrs(Either::Left(idx), ast::GenericParam::ConstParam(const_param)); } } } @@ -325,7 +331,7 @@ impl GenericParams { lifetime_param.type_bound_list(), Either::Right(lifetime_ref), ); - add_param_attrs(idx.into(), ast::GenericParam::LifetimeParam(lifetime_param)); + add_param_attrs(Either::Right(idx), ast::GenericParam::LifetimeParam(lifetime_param)); } } diff --git a/crates/hir-def/src/item_tree.rs b/crates/hir-def/src/item_tree.rs index 70b96b25739cb..3bea91ee61c3c 100644 --- a/crates/hir-def/src/item_tree.rs +++ b/crates/hir-def/src/item_tree.rs @@ -43,7 +43,10 @@ use std::{ }; use ast::{AstNode, HasName, StructKind}; -use base_db::CrateId; +use base_db::{ + span::{SpanAnchor, ROOT_ERASED_FILE_AST_ID}, + CrateId, +}; use either::Either; use hir_expand::{ ast_id_map::{AstIdNode, FileAstId}, @@ -119,7 +122,7 @@ impl ItemTree { let mut item_tree = match_ast! { match syntax { ast::SourceFile(file) => { - top_attrs = Some(RawAttrs::new(db.upcast(), &file, ctx.hygiene())); + top_attrs = Some(RawAttrs::new(db.upcast(), SpanAnchor { file_id, ast_id: ROOT_ERASED_FILE_AST_ID }, &file, ctx.hygiene())); ctx.lower_module_items(&file) }, ast::MacroItems(items) => { diff --git a/crates/hir-def/src/item_tree/lower.rs b/crates/hir-def/src/item_tree/lower.rs index c898eb5f92112..807b2a7bf752d 100644 --- a/crates/hir-def/src/item_tree/lower.rs +++ b/crates/hir-def/src/item_tree/lower.rs @@ -2,12 +2,14 @@ use std::collections::hash_map::Entry; +use base_db::span::ErasedFileAstId; use hir_expand::{ast_id_map::AstIdMap, hygiene::Hygiene, HirFileId}; use syntax::ast::{self, HasModuleItem, HasTypeBounds}; use crate::{ generics::{GenericParams, TypeParamData, TypeParamProvenance}, type_ref::{LifetimeRef, TraitBoundModifier, TraitRef}, + LocalLifetimeParamId, LocalTypeOrConstParamId, }; use super::*; @@ -21,6 +23,7 @@ pub(super) struct Ctx<'a> { tree: ItemTree, source_ast_id_map: Arc, body_ctx: crate::lower::LowerCtx<'a>, + file: HirFileId, } impl<'a> Ctx<'a> { @@ -30,6 +33,7 @@ impl<'a> Ctx<'a> { tree: ItemTree::default(), source_ast_id_map: db.ast_id_map(file), body_ctx: crate::lower::LowerCtx::with_file_id(db, file), + file, } } @@ -77,9 +81,18 @@ impl<'a> Ctx<'a> { } pub(super) fn lower_block(mut self, block: &ast::BlockExpr) -> ItemTree { - self.tree - .attrs - .insert(AttrOwner::TopLevel, RawAttrs::new(self.db.upcast(), block, self.hygiene())); + self.tree.attrs.insert( + AttrOwner::TopLevel, + RawAttrs::new( + self.db.upcast(), + SpanAnchor { + file_id: self.file, + ast_id: self.source_ast_id_map.ast_id(block).erase(), + }, + block, + self.hygiene(), + ), + ); self.tree.top_level = block .statements() .filter_map(|stmt| match stmt { @@ -109,8 +122,7 @@ impl<'a> Ctx<'a> { } fn lower_mod_item(&mut self, item: &ast::Item) -> Option { - let attrs = RawAttrs::new(self.db.upcast(), item, self.hygiene()); - let item: ModItem = match item { + let mod_item: ModItem = match item { ast::Item::Struct(ast) => self.lower_struct(ast)?.into(), ast::Item::Union(ast) => self.lower_union(ast)?.into(), ast::Item::Enum(ast) => self.lower_enum(ast)?.into(), @@ -129,10 +141,15 @@ impl<'a> Ctx<'a> { ast::Item::MacroDef(ast) => self.lower_macro_def(ast)?.into(), ast::Item::ExternBlock(ast) => 
self.lower_extern_block(ast).into(), }; + let attrs = RawAttrs::new( + self.db.upcast(), + SpanAnchor { file_id: self.file, ast_id: mod_item.ast_id(&self.tree).erase() }, + item, + self.hygiene(), + ); + self.add_attrs(mod_item.into(), attrs); - self.add_attrs(item.into(), attrs); - - Some(item) + Some(mod_item) } fn add_attrs(&mut self, item: AttrOwner, attrs: RawAttrs) { @@ -146,21 +163,37 @@ impl<'a> Ctx<'a> { } } - fn lower_assoc_item(&mut self, item: &ast::AssocItem) -> Option { - match item { + fn lower_assoc_item(&mut self, item_node: &ast::AssocItem) -> Option { + let item: AssocItem = match item_node { ast::AssocItem::Fn(ast) => self.lower_function(ast).map(Into::into), ast::AssocItem::TypeAlias(ast) => self.lower_type_alias(ast).map(Into::into), ast::AssocItem::Const(ast) => Some(self.lower_const(ast).into()), ast::AssocItem::MacroCall(ast) => self.lower_macro_call(ast).map(Into::into), - } + }?; + let attrs = RawAttrs::new( + self.db.upcast(), + SpanAnchor { file_id: self.file, ast_id: item.ast_id(&self.tree).erase() }, + item_node, + self.hygiene(), + ); + self.add_attrs( + match item { + AssocItem::Function(it) => AttrOwner::ModItem(ModItem::Function(it)), + AssocItem::TypeAlias(it) => AttrOwner::ModItem(ModItem::TypeAlias(it)), + AssocItem::Const(it) => AttrOwner::ModItem(ModItem::Const(it)), + AssocItem::MacroCall(it) => AttrOwner::ModItem(ModItem::MacroCall(it)), + }, + attrs, + ); + Some(item) } fn lower_struct(&mut self, strukt: &ast::Struct) -> Option> { let visibility = self.lower_visibility(strukt); let name = strukt.name()?.as_name(); - let generic_params = self.lower_generic_params(HasImplicitSelf::No, strukt); - let fields = self.lower_fields(&strukt.kind()); let ast_id = self.source_ast_id_map.ast_id(strukt); + let generic_params = self.lower_generic_params(HasImplicitSelf::No, strukt, ast_id.erase()); + let fields = self.lower_fields(&strukt.kind()); let res = Struct { name, visibility, generic_params, fields, ast_id }; Some(id(self.data().structs.alloc(res))) } @@ -183,8 +216,20 @@ impl<'a> Ctx<'a> { let start = self.next_field_idx(); for field in fields.fields() { if let Some(data) = self.lower_record_field(&field) { + let ast_id = match data.ast_id { + FieldAstId::Record(it) => it.erase(), + FieldAstId::Tuple(it) => it.erase(), + }; let idx = self.data().fields.alloc(data); - self.add_attrs(idx.into(), RawAttrs::new(self.db.upcast(), &field, self.hygiene())); + self.add_attrs( + idx.into(), + RawAttrs::new( + self.db.upcast(), + SpanAnchor { file_id: self.file, ast_id }, + &field, + self.hygiene(), + ), + ); } } let end = self.next_field_idx(); @@ -204,8 +249,20 @@ impl<'a> Ctx<'a> { let start = self.next_field_idx(); for (i, field) in fields.fields().enumerate() { let data = self.lower_tuple_field(i, &field); + let ast_id = match data.ast_id { + FieldAstId::Record(it) => it.erase(), + FieldAstId::Tuple(it) => it.erase(), + }; let idx = self.data().fields.alloc(data); - self.add_attrs(idx.into(), RawAttrs::new(self.db.upcast(), &field, self.hygiene())); + self.add_attrs( + idx.into(), + RawAttrs::new( + self.db.upcast(), + SpanAnchor { file_id: self.file, ast_id }, + &field, + self.hygiene(), + ), + ); } let end = self.next_field_idx(); IdxRange::new(start..end) @@ -222,12 +279,12 @@ impl<'a> Ctx<'a> { fn lower_union(&mut self, union: &ast::Union) -> Option> { let visibility = self.lower_visibility(union); let name = union.name()?.as_name(); - let generic_params = self.lower_generic_params(HasImplicitSelf::No, union); + let ast_id = 
self.source_ast_id_map.ast_id(union); + let generic_params = self.lower_generic_params(HasImplicitSelf::No, union, ast_id.erase()); let fields = match union.record_field_list() { Some(record_field_list) => self.lower_fields(&StructKind::Record(record_field_list)), None => Fields::Record(IdxRange::new(self.next_field_idx()..self.next_field_idx())), }; - let ast_id = self.source_ast_id_map.ast_id(union); let res = Union { name, visibility, generic_params, fields, ast_id }; Some(id(self.data().unions.alloc(res))) } @@ -235,12 +292,12 @@ impl<'a> Ctx<'a> { fn lower_enum(&mut self, enum_: &ast::Enum) -> Option> { let visibility = self.lower_visibility(enum_); let name = enum_.name()?.as_name(); - let generic_params = self.lower_generic_params(HasImplicitSelf::No, enum_); + let ast_id = self.source_ast_id_map.ast_id(enum_); + let generic_params = self.lower_generic_params(HasImplicitSelf::No, enum_, ast_id.erase()); let variants = match &enum_.variant_list() { Some(variant_list) => self.lower_variants(variant_list), None => IdxRange::new(self.next_variant_idx()..self.next_variant_idx()), }; - let ast_id = self.source_ast_id_map.ast_id(enum_); let res = Enum { name, visibility, generic_params, variants, ast_id }; Some(id(self.data().enums.alloc(res))) } @@ -249,10 +306,16 @@ impl<'a> Ctx<'a> { let start = self.next_variant_idx(); for variant in variants.variants() { if let Some(data) = self.lower_variant(&variant) { + let ast_id = data.ast_id.erase(); let idx = self.data().variants.alloc(data); self.add_attrs( idx.into(), - RawAttrs::new(self.db.upcast(), &variant, self.hygiene()), + RawAttrs::new( + self.db.upcast(), + SpanAnchor { file_id: self.file, ast_id }, + &variant, + self.hygiene(), + ), ); } } @@ -303,28 +366,39 @@ impl<'a> Ctx<'a> { }); self.add_attrs( idx.into(), - RawAttrs::new(self.db.upcast(), &self_param, self.hygiene()), + RawAttrs::new( + self.db.upcast(), + SpanAnchor { file_id: self.file, ast_id: ast_id.erase() }, + &self_param, + self.hygiene(), + ), ); has_self_param = true; } for param in param_list.params() { + let ast_id = self.source_ast_id_map.ast_id(¶m); let idx = match param.dotdotdot_token() { - Some(_) => { - let ast_id = self.source_ast_id_map.ast_id(¶m); - self.data() - .params - .alloc(Param { type_ref: None, ast_id: ParamAstId::Param(ast_id) }) - } + Some(_) => self + .data() + .params + .alloc(Param { type_ref: None, ast_id: ParamAstId::Param(ast_id) }), None => { let type_ref = TypeRef::from_ast_opt(&self.body_ctx, param.ty()); let ty = Interned::new(type_ref); - let ast_id = self.source_ast_id_map.ast_id(¶m); self.data() .params .alloc(Param { type_ref: Some(ty), ast_id: ParamAstId::Param(ast_id) }) } }; - self.add_attrs(idx.into(), RawAttrs::new(self.db.upcast(), ¶m, self.hygiene())); + self.add_attrs( + idx.into(), + RawAttrs::new( + self.db.upcast(), + SpanAnchor { file_id: self.file, ast_id: ast_id.erase() }, + ¶m, + self.hygiene(), + ), + ); } } let end_param = self.next_param_idx(); @@ -381,7 +455,8 @@ impl<'a> Ctx<'a> { ast_id, flags, }; - res.explicit_generic_params = self.lower_generic_params(HasImplicitSelf::No, func); + res.explicit_generic_params = + self.lower_generic_params(HasImplicitSelf::No, func, ast_id.erase()); Some(id(self.data().functions.alloc(res))) } @@ -394,8 +469,9 @@ impl<'a> Ctx<'a> { let type_ref = type_alias.ty().map(|it| self.lower_type_ref(&it)); let visibility = self.lower_visibility(type_alias); let bounds = self.lower_type_bounds(type_alias); - let generic_params = self.lower_generic_params(HasImplicitSelf::No, type_alias); 
let ast_id = self.source_ast_id_map.ast_id(type_alias); + let generic_params = + self.lower_generic_params(HasImplicitSelf::No, type_alias, ast_id.erase()); let res = TypeAlias { name, visibility, bounds, generic_params, type_ref, ast_id }; Some(id(self.data().type_aliases.alloc(res))) } @@ -443,23 +519,20 @@ impl<'a> Ctx<'a> { fn lower_trait(&mut self, trait_def: &ast::Trait) -> Option> { let name = trait_def.name()?.as_name(); let visibility = self.lower_visibility(trait_def); - let generic_params = - self.lower_generic_params(HasImplicitSelf::Yes(trait_def.type_bound_list()), trait_def); + let ast_id = self.source_ast_id_map.ast_id(trait_def); + let generic_params = self.lower_generic_params( + HasImplicitSelf::Yes(trait_def.type_bound_list()), + trait_def, + ast_id.erase(), + ); let is_auto = trait_def.auto_token().is_some(); let is_unsafe = trait_def.unsafe_token().is_some(); - let ast_id = self.source_ast_id_map.ast_id(trait_def); let items = trait_def .assoc_item_list() .into_iter() .flat_map(|list| list.assoc_items()) - .filter_map(|item| { - let attrs = RawAttrs::new(self.db.upcast(), &item, self.hygiene()); - self.lower_assoc_item(&item).map(|item| { - self.add_attrs(ModItem::from(item).into(), attrs); - item - }) - }) + .filter_map(|item_node| self.lower_assoc_item(&item_node)) .collect(); let def = Trait { name, visibility, generic_params, is_auto, is_unsafe, items, ast_id }; @@ -472,20 +545,23 @@ impl<'a> Ctx<'a> { ) -> Option> { let name = trait_alias_def.name()?.as_name(); let visibility = self.lower_visibility(trait_alias_def); + let ast_id = self.source_ast_id_map.ast_id(trait_alias_def); let generic_params = self.lower_generic_params( HasImplicitSelf::Yes(trait_alias_def.type_bound_list()), trait_alias_def, + ast_id.erase(), ); - let ast_id = self.source_ast_id_map.ast_id(trait_alias_def); let alias = TraitAlias { name, visibility, generic_params, ast_id }; Some(id(self.data().trait_aliases.alloc(alias))) } fn lower_impl(&mut self, impl_def: &ast::Impl) -> Option> { + let ast_id = self.source_ast_id_map.ast_id(impl_def); // Note that trait impls don't get implicit `Self` unlike traits, because here they are a // type alias rather than a type parameter, so this is handled by the resolver. - let generic_params = self.lower_generic_params(HasImplicitSelf::No, impl_def); + let generic_params = + self.lower_generic_params(HasImplicitSelf::No, impl_def, ast_id.erase()); // FIXME: If trait lowering fails, due to a non PathType for example, we treat this impl // as if it was an non-trait impl. Ideally we want to create a unique missing ref that only // equals itself. @@ -499,14 +575,8 @@ impl<'a> Ctx<'a> { .assoc_item_list() .into_iter() .flat_map(|it| it.assoc_items()) - .filter_map(|item| { - let assoc = self.lower_assoc_item(&item)?; - let attrs = RawAttrs::new(self.db.upcast(), &item, self.hygiene()); - self.add_attrs(ModItem::from(assoc).into(), attrs); - Some(assoc) - }) + .filter_map(|item| self.lower_assoc_item(&item)) .collect(); - let ast_id = self.source_ast_id_map.ast_id(impl_def); let res = Impl { generic_params, target_trait, self_ty, is_negative, is_unsafe, items, ast_id }; Some(id(self.data().impls.alloc(res))) @@ -572,15 +642,23 @@ impl<'a> Ctx<'a> { // (in other words, the knowledge that they're in an extern block must not be used). // This is because an extern block can contain macros whose ItemTree's top-level items // should be considered to be in an extern block too. 
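// ---------------------------------------------------------------------------
// Editorial sketch, not part of the patch: the shape of the change repeated
// for every item kind in this file. The AST id is now allocated before
// attributes and generic parameters are lowered, because both need a
// `SpanAnchor` that points at the owning item. `lower_thing` is an invented
// stand-in for the real `lower_struct`, `lower_union`, etc. in the hunks
// around this note.
fn lower_thing(ctx: &mut Ctx<'_>, thing: &ast::Struct) {
    // allocate the item's AST id up front ...
    let ast_id = ctx.source_ast_id_map.ast_id(thing);
    // ... so attribute lowering can anchor spans to the owning item; the same
    // erased id is what `lower_generic_params` now receives as `owner_ast_id`
    let anchor = SpanAnchor { file_id: ctx.file, ast_id: ast_id.erase() };
    let _attrs = RawAttrs::new(ctx.db.upcast(), anchor, thing, ctx.hygiene());
}
// ---------------------------------------------------------------------------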
- let attrs = RawAttrs::new(self.db.upcast(), &item, self.hygiene()); - let id: ModItem = match item { - ast::ExternItem::Fn(ast) => self.lower_function(&ast)?.into(), - ast::ExternItem::Static(ast) => self.lower_static(&ast)?.into(), - ast::ExternItem::TypeAlias(ty) => self.lower_type_alias(&ty)?.into(), - ast::ExternItem::MacroCall(call) => self.lower_macro_call(&call)?.into(), + let mod_item: ModItem = match &item { + ast::ExternItem::Fn(ast) => self.lower_function(ast)?.into(), + ast::ExternItem::Static(ast) => self.lower_static(ast)?.into(), + ast::ExternItem::TypeAlias(ty) => self.lower_type_alias(ty)?.into(), + ast::ExternItem::MacroCall(call) => self.lower_macro_call(call)?.into(), }; - self.add_attrs(id.into(), attrs); - Some(id) + let attrs = RawAttrs::new( + self.db.upcast(), + SpanAnchor { + file_id: self.file, + ast_id: mod_item.ast_id(&self.tree).erase(), + }, + &item, + self.hygiene(), + ); + self.add_attrs(mod_item.into(), attrs); + Some(mod_item) }) .collect() }); @@ -593,6 +671,7 @@ impl<'a> Ctx<'a> { &mut self, has_implicit_self: HasImplicitSelf, node: &dyn ast::HasGenericParams, + owner_ast_id: ErasedFileAstId, ) -> Interned { let mut generics = GenericParams::default(); @@ -612,12 +691,21 @@ impl<'a> Ctx<'a> { generics.fill_bounds(&self.body_ctx, bounds, Either::Left(self_param)); } - let add_param_attrs = |item, param| { - let attrs = RawAttrs::new(self.db.upcast(), ¶m, self.body_ctx.hygiene()); + let add_param_attrs = |item: Either, + param| { + let attrs = RawAttrs::new( + self.db.upcast(), + SpanAnchor { file_id: self.file, ast_id: owner_ast_id }, + ¶m, + self.body_ctx.hygiene(), + ); // This is identical to the body of `Ctx::add_attrs()` but we can't call that here // because it requires `&mut self` and the call to `generics.fill()` below also // references `self`. - match self.tree.attrs.entry(item) { + match self.tree.attrs.entry(match item { + Either::Right(id) => id.into(), + Either::Left(id) => id.into(), + }) { Entry::Occupied(mut entry) => { *entry.get_mut() = entry.get().merge(attrs); } diff --git a/crates/hir-def/src/lib.rs b/crates/hir-def/src/lib.rs index fd8f64d6705b0..5f09d7c481345 100644 --- a/crates/hir-def/src/lib.rs +++ b/crates/hir-def/src/lib.rs @@ -84,7 +84,7 @@ use nameres::DefMap; use stdx::impl_from; use syntax::ast; -use ::tt::token_id as tt; +pub use hir_expand::tt; use crate::{ builtin_type::BuiltinType, @@ -1341,15 +1341,13 @@ fn attr_macro_as_call_id( def: MacroDefId, ) -> MacroCallId { let arg = match macro_attr.input.as_deref() { - Some(AttrInput::TokenTree(tt)) => ( - { - let mut tt = tt.0.clone(); - tt.delimiter = tt::Delimiter::UNSPECIFIED; - tt - }, - tt.1.clone(), - ), - _ => (tt::Subtree::empty(), Default::default()), + Some(AttrInput::TokenTree(tt)) => { + let mut tt = tt.as_ref().clone(); + tt.delimiter = tt::Delimiter::UNSPECIFIED; + tt + } + + _ => tt::Subtree::empty(), }; def.as_lazy_macro( diff --git a/crates/hir-def/src/macro_expansion_tests/mbe.rs b/crates/hir-def/src/macro_expansion_tests/mbe.rs index d0906213243d6..b5d70052a89e8 100644 --- a/crates/hir-def/src/macro_expansion_tests/mbe.rs +++ b/crates/hir-def/src/macro_expansion_tests/mbe.rs @@ -15,7 +15,6 @@ use crate::macro_expansion_tests::check; fn token_mapping_smoke_test() { check( r#" -// +tokenids macro_rules! f { ( struct $ident:ident ) => { struct $ident { @@ -27,23 +26,19 @@ macro_rules! f { // +tokenids f!(struct MyTraitMap2); "#, - expect![[r##" -// call ids will be shifted by Shift(30) -// +tokenids -macro_rules! 
f {#0 - (#1 struct#2 $#3ident#4:#5ident#6 )#1 =#7>#8 {#9 - struct#10 $#11ident#12 {#13 - map#14:#15 :#16:#17std#18:#19:#20collections#21:#22:#23HashSet#24<#25(#26)#26>#27,#28 - }#13 - }#9;#29 -}#0 - -// // +tokenids -// f!(struct#1 MyTraitMap2#2); -struct#10 MyTraitMap2#32 {#13 - map#14:#15 ::std#18::collections#21::HashSet#24<#25(#26)#26>#27,#28 -}#13 -"##]], + expect![[r#" +macro_rules! f { + ( struct $ident:ident ) => { + struct $ident { + map: ::std::collections::HashSet<()>, + } + }; +} + +struct#SpanAnchor(FileId(0), 1)@58..64 MyTraitMap2#SpanAnchor(FileId(0), 2)@23..34 {#SpanAnchor(FileId(0), 1)@72..73 + map#SpanAnchor(FileId(0), 1)@86..89:#SpanAnchor(FileId(0), 1)@89..90 ::std#SpanAnchor(FileId(0), 1)@93..96::collections#SpanAnchor(FileId(0), 1)@98..109::HashSet#SpanAnchor(FileId(0), 1)@111..118<#SpanAnchor(FileId(0), 1)@118..119(#SpanAnchor(FileId(0), 1)@119..120)#SpanAnchor(FileId(0), 1)@120..121>#SpanAnchor(FileId(0), 1)@121..122,#SpanAnchor(FileId(0), 1)@122..123 +}#SpanAnchor(FileId(0), 1)@132..133 +"#]], ); } @@ -71,31 +66,22 @@ f! { "#, - expect![[r##" -// call ids will be shifted by Shift(18) + expect![[r#" // +tokenids -macro_rules! f {#0 - (#1$#2(#3$#4tt#5:#6tt#7)#3*#8)#1 =#9>#10 {#11 - $#12(#13$#14tt#15)#13*#16 - }#11;#17 -}#0 - -// // +tokenids -// f! { -// fn#1 main#2() { -// 1#5;#6 -// 1.0#7;#8 -// let#9 x#10 =#11 1#12;#13 -// } -// } -fn#19 main#20(#21)#21 {#22 - 1#23;#24 - 1.0#25;#26 - let#27 x#28 =#29 1#30;#31 -}#22 +macro_rules! f { + ($($tt:tt)*) => { + $($tt)* + }; +} +fn#SpanAnchor(FileId(0), 2)@22..24 main#SpanAnchor(FileId(0), 2)@25..29(#SpanAnchor(FileId(0), 2)@29..30)#SpanAnchor(FileId(0), 2)@30..31 {#SpanAnchor(FileId(0), 2)@32..33 + 1#SpanAnchor(FileId(0), 2)@42..43;#SpanAnchor(FileId(0), 2)@43..44 + 1.0#SpanAnchor(FileId(0), 2)@53..56;#SpanAnchor(FileId(0), 2)@56..57 + let#SpanAnchor(FileId(0), 2)@66..69 x#SpanAnchor(FileId(0), 2)@70..71 =#SpanAnchor(FileId(0), 2)@72..73 1#SpanAnchor(FileId(0), 2)@74..75;#SpanAnchor(FileId(0), 2)@75..76 +}#SpanAnchor(FileId(0), 2)@81..82 -"##]], + +"#]], ); } @@ -150,8 +136,7 @@ macro_rules! 
identity { } fn main(foo: ()) { - // format_args/*+tokenids*/!("{} {} {}"#1,#2 format_args#3!#4("{}"#6,#7 0#8),#9 foo#10,#11 identity#12!#13(10#15),#16 "bar"#17) -builtin#4294967295 ##4294967295format_args#4294967295 (#0"{} {} {}"#1,#2 format_args#3!#4(#5"{}"#6,#7 0#8)#5,#9 foo#10,#11 identity#12!#13(#1410#15)#14,#16 "bar"#17)#0 + builtin#SpanAnchor(FileId(0), 0)@0..0 ##SpanAnchor(FileId(0), 0)@0..0format_args#SpanAnchor(FileId(0), 0)@0..0 (#SpanAnchor(FileId(0), 6)@25..26"{} {} {}"#SpanAnchor(FileId(0), 6)@26..36,#SpanAnchor(FileId(0), 6)@36..37 format_args#SpanAnchor(FileId(0), 6)@38..49!#SpanAnchor(FileId(0), 6)@49..50(#SpanAnchor(FileId(0), 6)@50..51"{}"#SpanAnchor(FileId(0), 6)@51..55,#SpanAnchor(FileId(0), 6)@55..56 0#SpanAnchor(FileId(0), 6)@57..58)#SpanAnchor(FileId(0), 6)@58..59,#SpanAnchor(FileId(0), 6)@59..60 foo#SpanAnchor(FileId(0), 6)@61..64,#SpanAnchor(FileId(0), 6)@64..65 identity#SpanAnchor(FileId(0), 6)@66..74!#SpanAnchor(FileId(0), 6)@74..75(#SpanAnchor(FileId(0), 6)@75..7610#SpanAnchor(FileId(0), 6)@76..78)#SpanAnchor(FileId(0), 6)@78..79,#SpanAnchor(FileId(0), 6)@79..80 "bar"#SpanAnchor(FileId(0), 6)@81..86)#SpanAnchor(FileId(0), 6)@86..87 } "##]], diff --git a/crates/hir-def/src/macro_expansion_tests/mod.rs b/crates/hir-def/src/macro_expansion_tests/mod.rs index 8adced4e08244..1a672b4605c66 100644 --- a/crates/hir-def/src/macro_expansion_tests/mod.rs +++ b/crates/hir-def/src/macro_expansion_tests/mod.rs @@ -16,21 +16,16 @@ mod proc_macros; use std::{iter, ops::Range, sync}; -use ::mbe::TokenMap; use base_db::{fixture::WithFixture, ProcMacro, SourceDatabase}; use expect_test::Expect; -use hir_expand::{ - db::{DeclarativeMacroExpander, ExpandDatabase}, - AstId, InFile, MacroFile, -}; +use hir_expand::{db::ExpandDatabase, HirFileIdExt, InFile, MacroFile, SpanMap}; use stdx::format_to; use syntax::{ ast::{self, edit::IndentLevel}, - AstNode, SyntaxElement, - SyntaxKind::{self, COMMENT, EOF, IDENT, LIFETIME_IDENT}, - SyntaxNode, TextRange, T, + AstNode, + SyntaxKind::{COMMENT, EOF, IDENT, LIFETIME_IDENT}, + SyntaxNode, T, }; -use tt::token_id::{Subtree, TokenId}; use crate::{ db::DefDatabase, @@ -39,6 +34,7 @@ use crate::{ resolver::HasResolver, src::HasSource, test_db::TestDB, + tt::Subtree, AdtId, AsMacroCall, Lookup, ModuleDefId, }; @@ -88,43 +84,6 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream let mut text_edits = Vec::new(); let mut expansions = Vec::new(); - for macro_ in source_file.syntax().descendants().filter_map(ast::Macro::cast) { - let mut show_token_ids = false; - for comment in macro_.syntax().children_with_tokens().filter(|it| it.kind() == COMMENT) { - show_token_ids |= comment.to_string().contains("+tokenids"); - } - if !show_token_ids { - continue; - } - - let call_offset = macro_.syntax().text_range().start().into(); - let file_ast_id = db.ast_id_map(source.file_id).ast_id(¯o_); - let ast_id = AstId::new(source.file_id, file_ast_id.upcast()); - - let DeclarativeMacroExpander { mac, def_site_token_map } = - &*db.decl_macro_expander(krate, ast_id); - assert_eq!(mac.err(), None); - let tt = match ¯o_ { - ast::Macro::MacroRules(mac) => mac.token_tree().unwrap(), - ast::Macro::MacroDef(_) => unimplemented!(""), - }; - - let tt_start = tt.syntax().text_range().start(); - tt.syntax().descendants_with_tokens().filter_map(SyntaxElement::into_token).for_each( - |token| { - let range = token.text_range().checked_sub(tt_start).unwrap(); - if let Some(id) = def_site_token_map.token_by_range(range) { - let offset = (range.end() + 
tt_start).into(); - text_edits.push((offset..offset, format!("#{}", id.0))); - } - }, - ); - text_edits.push(( - call_offset..call_offset, - format!("// call ids will be shifted by {:?}\n", mac.shift()), - )); - } - for macro_call in source_file.syntax().descendants().filter_map(ast::MacroCall::cast) { let macro_call = InFile::new(source.file_id, ¯o_call); let res = macro_call @@ -138,10 +97,10 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream let macro_file = MacroFile { macro_call_id }; let mut expansion_result = db.parse_macro_expansion(macro_file); expansion_result.err = expansion_result.err.or(res.err); - expansions.push((macro_call.value.clone(), expansion_result, db.macro_arg(macro_call_id))); + expansions.push((macro_call.value.clone(), expansion_result)); } - for (call, exp, arg) in expansions.into_iter().rev() { + for (call, exp) in expansions.into_iter().rev() { let mut tree = false; let mut expect_errors = false; let mut show_token_ids = false; @@ -185,27 +144,7 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream } let range = call.syntax().text_range(); let range: Range = range.into(); - - if show_token_ids { - if let Some((tree, map, _)) = arg.value.as_deref() { - let tt_range = call.token_tree().unwrap().syntax().text_range(); - let mut ranges = Vec::new(); - extract_id_ranges(&mut ranges, map, tree); - for (range, id) in ranges { - let idx = (tt_range.start() + range.end()).into(); - text_edits.push((idx..idx, format!("#{}", id.0))); - } - } - text_edits.push((range.start..range.start, "// ".into())); - call.to_string().match_indices('\n').for_each(|(offset, _)| { - let offset = offset + 1 + range.start; - text_edits.push((offset..offset, "// ".into())); - }); - text_edits.push((range.end..range.end, "\n".into())); - text_edits.push((range.end..range.end, expn_text)); - } else { - text_edits.push((range, expn_text)); - } + text_edits.push((range, expn_text)); } text_edits.sort_by_key(|(range, _)| range.start); @@ -246,20 +185,6 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream expect.assert_eq(&expanded_text); } -fn extract_id_ranges(ranges: &mut Vec<(TextRange, TokenId)>, map: &TokenMap, tree: &Subtree) { - tree.token_trees.iter().for_each(|tree| match tree { - tt::TokenTree::Leaf(leaf) => { - let id = match leaf { - tt::Leaf::Literal(it) => it.span, - tt::Leaf::Punct(it) => it.span, - tt::Leaf::Ident(it) => it.span, - }; - ranges.extend(map.ranges_by_token(id, SyntaxKind::ERROR).map(|range| (range, id))); - } - tt::TokenTree::Subtree(tree) => extract_id_ranges(ranges, map, tree), - }); -} - fn reindent(indent: IndentLevel, pp: String) -> String { if !pp.contains('\n') { return pp; @@ -276,7 +201,7 @@ fn reindent(indent: IndentLevel, pp: String) -> String { res } -fn pretty_print_macro_expansion(expn: SyntaxNode, map: Option<&TokenMap>) -> String { +fn pretty_print_macro_expansion(expn: SyntaxNode, map: Option<&SpanMap>) -> String { let mut res = String::new(); let mut prev_kind = EOF; let mut indent_level = 0; @@ -323,8 +248,8 @@ fn pretty_print_macro_expansion(expn: SyntaxNode, map: Option<&TokenMap>) -> Str prev_kind = curr_kind; format_to!(res, "{}", token); if let Some(map) = map { - if let Some(id) = map.token_by_range(token.text_range()) { - format_to!(res, "#{}", id.0); + if let Some(span) = map.span_for_range(token.text_range()) { + format_to!(res, "#{:?}@{:?}", span.anchor, span.range); } } } diff --git a/crates/hir-def/src/nameres/collector.rs 
b/crates/hir-def/src/nameres/collector.rs index 2d4586146db02..659e7ed5033e4 100644 --- a/crates/hir-def/src/nameres/collector.rs +++ b/crates/hir-def/src/nameres/collector.rs @@ -5,6 +5,7 @@ use std::{cmp::Ordering, iter, mem}; +use ::tt::Span; use base_db::{CrateId, Dependency, Edition, FileId}; use cfg::{CfgExpr, CfgOptions}; use either::Either; @@ -85,8 +86,7 @@ pub(super) fn collect_defs(db: &dyn DefDatabase, def_map: DefMap, tree_id: TreeI .enumerate() .map(|(idx, it)| { // FIXME: a hacky way to create a Name from string. - let name = - tt::Ident { text: it.name.clone(), span: tt::TokenId::unspecified() }; + let name = tt::Ident { text: it.name.clone(), span: tt::SpanData::DUMMY }; (name.as_name(), ProcMacroExpander::new(base_db::ProcMacroId(idx as u32))) }) .collect()) @@ -471,7 +471,7 @@ impl DefCollector<'_> { directive.module_id, MacroCallKind::Attr { ast_id: ast_id.ast_id, - attr_args: Arc::new((tt::Subtree::empty(), Default::default())), + attr_args: Arc::new(tt::Subtree::empty()), invoc_attr_index: attr.id, }, attr.path().clone(), @@ -2083,8 +2083,7 @@ impl ModCollector<'_, '_> { let name = match attrs.by_key("rustc_builtin_macro").string_value() { Some(it) => { // FIXME: a hacky way to create a Name from string. - name = - tt::Ident { text: it.clone(), span: tt::TokenId::unspecified() }.as_name(); + name = tt::Ident { text: it.clone(), span: tt::SpanData::DUMMY }.as_name(); &name } None => { diff --git a/crates/hir-def/src/nameres/mod_resolution.rs b/crates/hir-def/src/nameres/mod_resolution.rs index 2dcc2c30fe169..22802433aa58d 100644 --- a/crates/hir-def/src/nameres/mod_resolution.rs +++ b/crates/hir-def/src/nameres/mod_resolution.rs @@ -1,7 +1,7 @@ //! This module resolves `mod foo;` declaration to file. use arrayvec::ArrayVec; use base_db::{AnchoredPath, FileId}; -use hir_expand::name::Name; +use hir_expand::{name::Name, HirFileIdExt}; use limit::Limit; use syntax::SmolStr; diff --git a/crates/hir-def/src/nameres/tests/incremental.rs b/crates/hir-def/src/nameres/tests/incremental.rs index 4a86f88e57aff..977745c476f15 100644 --- a/crates/hir-def/src/nameres/tests/incremental.rs +++ b/crates/hir-def/src/nameres/tests/incremental.rs @@ -66,7 +66,7 @@ fn typing_inside_a_function_should_not_invalidate_def_map() { #[test] fn typing_inside_a_macro_should_not_invalidate_def_map() { - let (mut db, pos) = TestDB::with_position( + check_def_map_is_not_recomputed( r" //- /lib.rs macro_rules! 
m {
@@ -84,27 +84,15 @@ fn typing_inside_a_macro_should_not_invalidate_def_map() {
 //- /foo/bar.rs
 $0
 m!(X);
+
+        pub struct S {}
 ",
-    );
-    let krate = db.test_crate();
-    {
-        let events = db.log_executed(|| {
-            let crate_def_map = db.crate_def_map(krate);
-            let (_, module_data) = crate_def_map.modules.iter().last().unwrap();
-            assert_eq!(module_data.scope.resolutions().count(), 1);
-        });
-        assert!(format!("{events:?}").contains("crate_def_map"), "{events:#?}")
-    }
-    db.set_file_text(pos.file_id, Arc::from("m!(Y);"));
+        r"
+        m!(Y);
 
-    {
-        let events = db.log_executed(|| {
-            let crate_def_map = db.crate_def_map(krate);
-            let (_, module_data) = crate_def_map.modules.iter().last().unwrap();
-            assert_eq!(module_data.scope.resolutions().count(), 1);
-        });
-        assert!(!format!("{events:?}").contains("crate_def_map"), "{events:#?}")
-    }
+        pub struct S {}
+        ",
+    );
 }
 
 #[test]
diff --git a/crates/hir-expand/src/ast_id_map.rs b/crates/hir-expand/src/ast_id_map.rs
index 40726505491b6..eb43ae37e08a6 100644
--- a/crates/hir-expand/src/ast_id_map.rs
+++ b/crates/hir-expand/src/ast_id_map.rs
@@ -12,11 +12,13 @@ use std::{
     marker::PhantomData,
 };
 
-use la_arena::{Arena, Idx};
+use la_arena::{Arena, Idx, RawIdx};
 use profile::Count;
 use rustc_hash::FxHasher;
 use syntax::{ast, AstNode, AstPtr, SyntaxNode, SyntaxNodePtr};
 
+pub use base_db::span::ErasedFileAstId;
+
 /// `AstId` points to an AST node in a specific file.
 pub struct FileAstId<N: AstIdNode> {
     raw: ErasedFileAstId,
@@ -62,8 +64,6 @@ impl<N: AstIdNode> FileAstId<N> {
     }
 }
 
-pub type ErasedFileAstId = Idx<SyntaxNodePtr>;
-
 pub trait AstIdNode: AstNode {}
 macro_rules! register_ast_id_node {
     (impl AstIdNode for $($ident:ident),+ ) => {
@@ -129,6 +129,11 @@ impl AstIdMap {
     pub(crate) fn from_source(node: &SyntaxNode) -> AstIdMap {
         assert!(node.parent().is_none());
         let mut res = AstIdMap::default();
+
+        // make sure to allocate the root node
+        if !should_alloc_id(node.kind()) {
+            res.alloc(node);
+        }
         // By walking the tree in breadth-first order we make sure that parents
         // get lower ids then children. That is, adding a new child does not
         // change parent's id. This means that, say, adding a new function to a
@@ -155,6 +160,11 @@ impl AstIdMap {
         res
     }
 
+    /// The [`SyntaxNodePtr`] of the root node.
+    pub fn root(&self) -> SyntaxNodePtr {
+        self.arena[Idx::from_raw(RawIdx::from_u32(0))].clone()
+    }
+
     pub fn ast_id<N: AstIdNode>(&self, item: &N) -> FileAstId<N> {
         let raw = self.erased_ast_id(item.syntax());
         FileAstId { raw, covariant: PhantomData }
     }
@@ -164,7 +174,7 @@ impl AstIdMap {
         AstPtr::try_from_raw(self.arena[id.raw].clone()).unwrap()
     }
 
-    pub(crate) fn get_raw(&self, id: ErasedFileAstId) -> SyntaxNodePtr {
+    pub fn get_raw(&self, id: ErasedFileAstId) -> SyntaxNodePtr {
         self.arena[id].clone()
     }
 
diff --git a/crates/hir-expand/src/attrs.rs b/crates/hir-expand/src/attrs.rs
index 0ec2422b30cf8..9652dd345a8d1 100644
--- a/crates/hir-expand/src/attrs.rs
+++ b/crates/hir-expand/src/attrs.rs
@@ -1,7 +1,8 @@
 //! A higher level attributes based on TokenTree, with also some shortcuts.
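// A rough sketch of the span plumbing this patch threads through attrs.rs and the
// files below, assuming the shapes visible in the hunks (an illustration, not the
// exact base_db definitions): tokens now carry a `SpanData` -- an anchor plus a
// text range relative to that anchor -- instead of an opaque `TokenId` that had to
// be resolved through a separate `mbe::TokenMap` side table:
//
//     struct SpanAnchor { file_id: FileId, ast_id: ErasedFileAstId }
//     struct SpanData { range: TextRange, anchor: SpanAnchor }
//
// This is why `AttrInput::TokenTree` shrinks from `Box<(tt::Subtree, mbe::TokenMap)>`
// to a plain `Box<tt::Subtree>` in the hunks that follow.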
 use std::{fmt, ops};
 
-use base_db::CrateId;
+use ::tt::Span;
+use base_db::{span::SpanAnchor, CrateId};
 use cfg::CfgExpr;
 use either::Either;
 use intern::Interned;
@@ -39,11 +40,16 @@ impl ops::Deref for RawAttrs {
 impl RawAttrs {
     pub const EMPTY: Self = Self { entries: None };
 
-    pub fn new(db: &dyn ExpandDatabase, owner: &dyn ast::HasAttrs, hygiene: &Hygiene) -> Self {
+    pub fn new(
+        db: &dyn ExpandDatabase,
+        span_anchor: SpanAnchor,
+        owner: &dyn ast::HasAttrs,
+        hygiene: &Hygiene,
+    ) -> Self {
         let entries = collect_attrs(owner)
             .filter_map(|(id, attr)| match attr {
                 Either::Left(attr) => {
-                    attr.meta().and_then(|meta| Attr::from_src(db, meta, hygiene, id))
+                    attr.meta().and_then(|meta| Attr::from_src(db, span_anchor, meta, hygiene, id))
                 }
                 Either::Right(comment) => comment.doc_comment().map(|doc| Attr {
                     id,
@@ -58,9 +64,13 @@ impl RawAttrs {
         Self { entries: if entries.is_empty() { None } else { Some(entries) } }
     }
 
-    pub fn from_attrs_owner(db: &dyn ExpandDatabase, owner: InFile<&dyn ast::HasAttrs>) -> Self {
+    pub fn from_attrs_owner(
+        db: &dyn ExpandDatabase,
+        span_anchor: SpanAnchor,
+        owner: InFile<&dyn ast::HasAttrs>,
+    ) -> Self {
         let hygiene = Hygiene::new(db, owner.file_id);
-        Self::new(db, owner.value, &hygiene)
+        Self::new(db, span_anchor, owner.value, &hygiene)
     }
 
     pub fn merge(&self, other: Self) -> Self {
@@ -190,16 +200,17 @@ pub struct Attr {
 #[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub enum AttrInput {
     /// `#[attr = "string"]`
+    // FIXME: This is losing the span
     Literal(SmolStr),
     /// `#[attr(subtree)]`
-    TokenTree(Box<(tt::Subtree, mbe::TokenMap)>),
+    TokenTree(Box<tt::Subtree>),
 }
 
 impl fmt::Display for AttrInput {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         match self {
             AttrInput::Literal(lit) => write!(f, " = \"{}\"", lit.escape_debug()),
-            AttrInput::TokenTree(tt) => tt.0.fmt(f),
+            AttrInput::TokenTree(tt) => tt.fmt(f),
         }
     }
 }
@@ -207,6 +218,7 @@ impl fmt::Display for AttrInput {
 impl Attr {
     fn from_src(
         db: &dyn ExpandDatabase,
+        span_anchor: SpanAnchor,
         ast: ast::Meta,
         hygiene: &Hygiene,
         id: AttrId,
@@ -219,8 +231,13 @@ impl Attr {
             };
             Some(Interned::new(AttrInput::Literal(value)))
         } else if let Some(tt) = ast.token_tree() {
-            let (tree, map) = syntax_node_to_token_tree(tt.syntax());
-            Some(Interned::new(AttrInput::TokenTree(Box::new((tree, map)))))
+            // FIXME: We could also allocate ids for attributes and use the attribute itself as an anchor
+            let offset =
+                db.ast_id_map(span_anchor.file_id).get_raw(span_anchor.ast_id).text_range().start();
+            // FIXME: Spanmap
+            let tree =
+                syntax_node_to_token_tree(tt.syntax(), span_anchor, offset, &Default::default());
+            Some(Interned::new(AttrInput::TokenTree(Box::new(tree))))
         } else {
             None
         };
@@ -233,10 +250,12 @@ impl Attr {
         hygiene: &Hygiene,
         id: AttrId,
     ) -> Option<Attr> {
-        let (parse, _) = mbe::token_tree_to_syntax_node(tt, mbe::TopEntryPoint::MetaItem);
+        // FIXME: Unnecessary roundtrip tt -> ast -> tt
+        let (parse, _map) = mbe::token_tree_to_syntax_node(tt, mbe::TopEntryPoint::MetaItem);
         let ast = ast::Meta::cast(parse.syntax_node())?;
 
-        Self::from_src(db, ast, hygiene, id)
+        // FIXME: we discard spans here!
+        Self::from_src(db, SpanAnchor::DUMMY, ast, hygiene, id)
     }
 
     pub fn path(&self) -> &ModPath {
@@ -256,7 +275,7 @@ impl Attr {
     /// #[path(ident)]
     pub fn single_ident_value(&self) -> Option<&tt::Ident> {
         match self.input.as_deref()?
{ - AttrInput::TokenTree(tt) => match &*tt.0.token_trees { + AttrInput::TokenTree(tt) => match &*tt.token_trees { [tt::TokenTree::Leaf(tt::Leaf::Ident(ident))] => Some(ident), _ => None, }, @@ -267,7 +286,7 @@ impl Attr { /// #[path TokenTree] pub fn token_tree_value(&self) -> Option<&Subtree> { match self.input.as_deref()? { - AttrInput::TokenTree(tt) => Some(&tt.0), + AttrInput::TokenTree(tt) => Some(tt), _ => None, } } diff --git a/crates/hir-expand/src/builtin_attr_macro.rs b/crates/hir-expand/src/builtin_attr_macro.rs index 4ee12e2f21290..de8c0ac810bf7 100644 --- a/crates/hir-expand/src/builtin_attr_macro.rs +++ b/crates/hir-expand/src/builtin_attr_macro.rs @@ -1,5 +1,7 @@ //! Builtin attributes. +use ::tt::Span; + use crate::{db::ExpandDatabase, name, tt, ExpandResult, MacroCallId, MacroCallKind}; macro_rules! register_builtin { @@ -98,7 +100,7 @@ fn derive_attr_expand( ) -> ExpandResult { let loc = db.lookup_intern_macro_call(id); let derives = match &loc.kind { - MacroCallKind::Attr { attr_args, .. } if loc.def.is_attribute_derive() => &attr_args.0, + MacroCallKind::Attr { attr_args, .. } if loc.def.is_attribute_derive() => attr_args, _ => return ExpandResult::ok(tt::Subtree::empty()), }; pseudo_derive_attr_expansion(tt, derives) @@ -112,7 +114,7 @@ pub fn pseudo_derive_attr_expansion( tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct { char, spacing: tt::Spacing::Alone, - span: tt::TokenId::unspecified(), + span: tt::SpanData::DUMMY, })) }; diff --git a/crates/hir-expand/src/builtin_derive_macro.rs b/crates/hir-expand/src/builtin_derive_macro.rs index ecc8b407a9c87..16cce35c13d9f 100644 --- a/crates/hir-expand/src/builtin_derive_macro.rs +++ b/crates/hir-expand/src/builtin_derive_macro.rs @@ -1,18 +1,20 @@ //! Builtin derives. -use ::tt::Ident; +use ::tt::Span; use base_db::{CrateOrigin, LangCrateOrigin}; use itertools::izip; -use mbe::TokenMap; use rustc_hash::FxHashSet; use stdx::never; use tracing::debug; use crate::{ name::{AsName, Name}, - tt::{self, TokenId}, + tt, SpanMap, +}; +use syntax::{ + ast::{self, AstNode, FieldList, HasAttrs, HasGenericParams, HasName, HasTypeBounds}, + TextSize, }; -use syntax::ast::{self, AstNode, FieldList, HasAttrs, HasGenericParams, HasName, HasTypeBounds}; use crate::{db::ExpandDatabase, name, quote, ExpandError, ExpandResult, MacroCallId}; @@ -29,7 +31,7 @@ macro_rules! 
register_builtin { db: &dyn ExpandDatabase, id: MacroCallId, tt: &ast::Adt, - token_map: &TokenMap, + token_map: &SpanMap, ) -> ExpandResult { let expander = match *self { $( BuiltinDeriveExpander::$trait => $expand, )* @@ -71,7 +73,7 @@ enum VariantShape { } fn tuple_field_iterator(n: usize) -> impl Iterator { - (0..n).map(|it| Ident::new(format!("f{it}"), tt::TokenId::unspecified())) + (0..n).map(|it| tt::Ident::new(format!("f{it}"), tt::SpanData::DUMMY)) } impl VariantShape { @@ -117,7 +119,7 @@ impl VariantShape { } } - fn from(tm: &TokenMap, value: Option) -> Result { + fn from(tm: &SpanMap, value: Option) -> Result { let r = match value { None => VariantShape::Unit, Some(FieldList::RecordFieldList(it)) => VariantShape::Struct( @@ -189,8 +191,8 @@ struct BasicAdtInfo { associated_types: Vec, } -fn parse_adt(tm: &TokenMap, adt: &ast::Adt) -> Result { - let (name, generic_param_list, shape) = match &adt { +fn parse_adt(tm: &SpanMap, adt: &ast::Adt) -> Result { + let (name, generic_param_list, shape) = match adt { ast::Adt::Struct(it) => ( it.name(), it.generic_param_list(), @@ -234,21 +236,44 @@ fn parse_adt(tm: &TokenMap, adt: &ast::Adt) -> Result match this { Some(it) => { param_type_set.insert(it.as_name()); - mbe::syntax_node_to_token_tree(it.syntax()).0 + mbe::syntax_node_to_token_tree( + it.syntax(), + tm.span_for_range(it.syntax().first_token().unwrap().text_range()) + .unwrap() + .anchor, + TextSize::from(0), + tm, + ) } None => tt::Subtree::empty(), } }; let bounds = match ¶m { - ast::TypeOrConstParam::Type(it) => { - it.type_bound_list().map(|it| mbe::syntax_node_to_token_tree(it.syntax()).0) - } + ast::TypeOrConstParam::Type(it) => it.type_bound_list().map(|it| { + mbe::syntax_node_to_token_tree( + it.syntax(), + tm.span_for_range(it.syntax().first_token().unwrap().text_range()) + .unwrap() + .anchor, + TextSize::from(0), + tm, + ) + }), ast::TypeOrConstParam::Const(_) => None, }; let ty = if let ast::TypeOrConstParam::Const(param) = param { let ty = param .ty() - .map(|ty| mbe::syntax_node_to_token_tree(ty.syntax()).0) + .map(|ty| { + mbe::syntax_node_to_token_tree( + ty.syntax(), + tm.span_for_range(ty.syntax().first_token().unwrap().text_range()) + .unwrap() + .anchor, + TextSize::from(0), + tm, + ) + }) .unwrap_or_else(tt::Subtree::empty); Some(ty) } else { @@ -282,20 +307,26 @@ fn parse_adt(tm: &TokenMap, adt: &ast::Adt) -> Result let name = p.path()?.qualifier()?.as_single_name_ref()?.as_name(); param_type_set.contains(&name).then_some(p) }) - .map(|it| mbe::syntax_node_to_token_tree(it.syntax()).0) + .map(|it| { + mbe::syntax_node_to_token_tree( + it.syntax(), + tm.span_for_range(it.syntax().first_token().unwrap().text_range()).unwrap().anchor, + TextSize::from(0), + tm, + ) + }) .collect(); let name_token = name_to_token(&tm, name)?; Ok(BasicAdtInfo { name: name_token, shape, param_types, associated_types }) } -fn name_to_token(token_map: &TokenMap, name: Option) -> Result { +fn name_to_token(token_map: &SpanMap, name: Option) -> Result { let name = name.ok_or_else(|| { debug!("parsed item has no name"); ExpandError::other("missing name") })?; - let name_token_id = - token_map.token_by_range(name.syntax().text_range()).unwrap_or_else(TokenId::unspecified); - let name_token = tt::Ident { span: name_token_id, text: name.text().into() }; + let span = token_map.span_for_range(name.syntax().text_range()).unwrap(); + let name_token = tt::Ident { span, text: name.text().into() }; Ok(name_token) } @@ -332,7 +363,7 @@ fn name_to_token(token_map: &TokenMap, name: Option) -> 
Result tt::Subtree, ) -> ExpandResult { @@ -393,7 +424,7 @@ fn copy_expand( db: &dyn ExpandDatabase, id: MacroCallId, tt: &ast::Adt, - tm: &TokenMap, + tm: &SpanMap, ) -> ExpandResult { let krate = find_builtin_crate(db, id); expand_simple_derive(tt, tm, quote! { #krate::marker::Copy }, |_| quote! {}) @@ -403,16 +434,13 @@ fn clone_expand( db: &dyn ExpandDatabase, id: MacroCallId, tt: &ast::Adt, - tm: &TokenMap, + tm: &SpanMap, ) -> ExpandResult { let krate = find_builtin_crate(db, id); expand_simple_derive(tt, tm, quote! { #krate::clone::Clone }, |adt| { if matches!(adt.shape, AdtShape::Union) { - let star = tt::Punct { - char: '*', - spacing: ::tt::Spacing::Alone, - span: tt::TokenId::unspecified(), - }; + let star = + tt::Punct { char: '*', spacing: ::tt::Spacing::Alone, span: tt::SpanData::DUMMY }; return quote! { fn clone(&self) -> Self { #star self @@ -420,11 +448,8 @@ fn clone_expand( }; } if matches!(&adt.shape, AdtShape::Enum { variants, .. } if variants.is_empty()) { - let star = tt::Punct { - char: '*', - spacing: ::tt::Spacing::Alone, - span: tt::TokenId::unspecified(), - }; + let star = + tt::Punct { char: '*', spacing: ::tt::Spacing::Alone, span: tt::SpanData::DUMMY }; return quote! { fn clone(&self) -> Self { match #star self {} @@ -452,16 +477,14 @@ fn clone_expand( } /// This function exists since `quote! { => }` doesn't work. -fn fat_arrow() -> ::tt::Subtree { - let eq = - tt::Punct { char: '=', spacing: ::tt::Spacing::Joint, span: tt::TokenId::unspecified() }; +fn fat_arrow() -> tt::Subtree { + let eq = tt::Punct { char: '=', spacing: ::tt::Spacing::Joint, span: tt::SpanData::DUMMY }; quote! { #eq> } } /// This function exists since `quote! { && }` doesn't work. -fn and_and() -> ::tt::Subtree { - let and = - tt::Punct { char: '&', spacing: ::tt::Spacing::Joint, span: tt::TokenId::unspecified() }; +fn and_and() -> tt::Subtree { + let and = tt::Punct { char: '&', spacing: ::tt::Spacing::Joint, span: tt::SpanData::DUMMY }; quote! { #and& } } @@ -469,7 +492,7 @@ fn default_expand( db: &dyn ExpandDatabase, id: MacroCallId, tt: &ast::Adt, - tm: &TokenMap, + tm: &SpanMap, ) -> ExpandResult { let krate = &find_builtin_crate(db, id); expand_simple_derive(tt, tm, quote! { #krate::default::Default }, |adt| { @@ -509,7 +532,7 @@ fn debug_expand( db: &dyn ExpandDatabase, id: MacroCallId, tt: &ast::Adt, - tm: &TokenMap, + tm: &SpanMap, ) -> ExpandResult { let krate = &find_builtin_crate(db, id); expand_simple_derive(tt, tm, quote! { #krate::fmt::Debug }, |adt| { @@ -540,11 +563,8 @@ fn debug_expand( }, }; if matches!(&adt.shape, AdtShape::Enum { variants, .. } if variants.is_empty()) { - let star = tt::Punct { - char: '*', - spacing: ::tt::Spacing::Alone, - span: tt::TokenId::unspecified(), - }; + let star = + tt::Punct { char: '*', spacing: ::tt::Spacing::Alone, span: tt::SpanData::DUMMY }; return quote! { fn fmt(&self, f: &mut #krate::fmt::Formatter) -> #krate::fmt::Result { match #star self {} @@ -590,7 +610,7 @@ fn hash_expand( db: &dyn ExpandDatabase, id: MacroCallId, tt: &ast::Adt, - tm: &TokenMap, + tm: &SpanMap, ) -> ExpandResult { let krate = &find_builtin_crate(db, id); expand_simple_derive(tt, tm, quote! { #krate::hash::Hash }, |adt| { @@ -599,11 +619,8 @@ fn hash_expand( return quote! {}; } if matches!(&adt.shape, AdtShape::Enum { variants, .. 
} if variants.is_empty()) { - let star = tt::Punct { - char: '*', - spacing: ::tt::Spacing::Alone, - span: tt::TokenId::unspecified(), - }; + let star = + tt::Punct { char: '*', spacing: ::tt::Spacing::Alone, span: tt::SpanData::DUMMY }; return quote! { fn hash(&self, ra_expand_state: &mut H) { match #star self {} @@ -644,7 +661,7 @@ fn eq_expand( db: &dyn ExpandDatabase, id: MacroCallId, tt: &ast::Adt, - tm: &TokenMap, + tm: &SpanMap, ) -> ExpandResult { let krate = find_builtin_crate(db, id); expand_simple_derive(tt, tm, quote! { #krate::cmp::Eq }, |_| quote! {}) @@ -654,7 +671,7 @@ fn partial_eq_expand( db: &dyn ExpandDatabase, id: MacroCallId, tt: &ast::Adt, - tm: &TokenMap, + tm: &SpanMap, ) -> ExpandResult { let krate = find_builtin_crate(db, id); expand_simple_derive(tt, tm, quote! { #krate::cmp::PartialEq }, |adt| { @@ -674,14 +691,14 @@ fn partial_eq_expand( } [first, rest @ ..] => { let rest = rest.iter().map(|it| { - let t1 = Ident::new(format!("{}_self", it.text), it.span); - let t2 = Ident::new(format!("{}_other", it.text), it.span); + let t1 = tt::Ident::new(format!("{}_self", it.text), it.span); + let t2 = tt::Ident::new(format!("{}_other", it.text), it.span); let and_and = and_and(); quote!(#and_and #t1 .eq( #t2 )) }); let first = { - let t1 = Ident::new(format!("{}_self", first.text), first.span); - let t2 = Ident::new(format!("{}_other", first.text), first.span); + let t1 = tt::Ident::new(format!("{}_self", first.text), first.span); + let t2 = tt::Ident::new(format!("{}_other", first.text), first.span); quote!(#t1 .eq( #t2 )) }; quote!(#first ##rest) @@ -708,11 +725,11 @@ fn self_and_other_patterns( name: &tt::Ident, ) -> (Vec, Vec) { let self_patterns = adt.shape.as_pattern_map(name, |it| { - let t = Ident::new(format!("{}_self", it.text), it.span); + let t = tt::Ident::new(format!("{}_self", it.text), it.span); quote!(#t) }); let other_patterns = adt.shape.as_pattern_map(name, |it| { - let t = Ident::new(format!("{}_other", it.text), it.span); + let t = tt::Ident::new(format!("{}_other", it.text), it.span); quote!(#t) }); (self_patterns, other_patterns) @@ -722,7 +739,7 @@ fn ord_expand( db: &dyn ExpandDatabase, id: MacroCallId, tt: &ast::Adt, - tm: &TokenMap, + tm: &SpanMap, ) -> ExpandResult { let krate = &find_builtin_crate(db, id); expand_simple_derive(tt, tm, quote! { #krate::cmp::Ord }, |adt| { @@ -752,8 +769,8 @@ fn ord_expand( |(pat1, pat2, fields)| { let mut body = quote!(#krate::cmp::Ordering::Equal); for f in fields.into_iter().rev() { - let t1 = Ident::new(format!("{}_self", f.text), f.span); - let t2 = Ident::new(format!("{}_other", f.text), f.span); + let t1 = tt::Ident::new(format!("{}_self", f.text), f.span); + let t2 = tt::Ident::new(format!("{}_other", f.text), f.span); body = compare(krate, quote!(#t1), quote!(#t2), body); } let fat_arrow = fat_arrow(); @@ -784,7 +801,7 @@ fn partial_ord_expand( db: &dyn ExpandDatabase, id: MacroCallId, tt: &ast::Adt, - tm: &TokenMap, + tm: &SpanMap, ) -> ExpandResult { let krate = &find_builtin_crate(db, id); expand_simple_derive(tt, tm, quote! 
{ #krate::cmp::PartialOrd }, |adt| { @@ -817,8 +834,8 @@ fn partial_ord_expand( |(pat1, pat2, fields)| { let mut body = quote!(#krate::option::Option::Some(#krate::cmp::Ordering::Equal)); for f in fields.into_iter().rev() { - let t1 = Ident::new(format!("{}_self", f.text), f.span); - let t2 = Ident::new(format!("{}_other", f.text), f.span); + let t1 = tt::Ident::new(format!("{}_self", f.text), f.span); + let t2 = tt::Ident::new(format!("{}_other", f.text), f.span); body = compare(krate, quote!(#t1), quote!(#t2), body); } let fat_arrow = fat_arrow(); diff --git a/crates/hir-expand/src/builtin_fn_macro.rs b/crates/hir-expand/src/builtin_fn_macro.rs index a04de10b899cf..adbe49473aceb 100644 --- a/crates/hir-expand/src/builtin_fn_macro.rs +++ b/crates/hir-expand/src/builtin_fn_macro.rs @@ -1,17 +1,22 @@ //! Builtin macro -use base_db::{AnchoredPath, Edition, FileId}; +use base_db::{ + span::{SpanAnchor, ROOT_ERASED_FILE_AST_ID}, + AnchoredPath, Edition, FileId, +}; use cfg::CfgExpr; use either::Either; -use mbe::{parse_exprs_with_sep, parse_to_token_tree, TokenMap}; +use mbe::{parse_exprs_with_sep, parse_to_token_tree}; use syntax::{ ast::{self, AstToken}, SmolStr, }; use crate::{ - db::ExpandDatabase, name, quote, tt, EagerCallInfo, ExpandError, ExpandResult, MacroCallId, - MacroCallLoc, + db::ExpandDatabase, + name, quote, + tt::{self, Span}, + EagerCallInfo, ExpandError, ExpandResult, HirFileIdExt, MacroCallId, MacroCallLoc, }; macro_rules! register_builtin { @@ -110,7 +115,7 @@ register_builtin! { } const DOLLAR_CRATE: tt::Ident = - tt::Ident { text: SmolStr::new_inline("$crate"), span: tt::TokenId::unspecified() }; + tt::Ident { text: SmolStr::new_inline("$crate"), span: tt::SpanData::DUMMY }; fn module_path_expand( _db: &dyn ExpandDatabase, @@ -131,7 +136,7 @@ fn line_expand( delimiter: tt::Delimiter::unspecified(), token_trees: vec![tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal { text: "0u32".into(), - span: tt::Span::UNSPECIFIED, + span: tt::SpanData::DUMMY, }))], }) } @@ -179,7 +184,7 @@ fn assert_expand( token_trees: vec![tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct { char: ',', spacing: tt::Spacing::Alone, - span: tt::TokenId::unspecified(), + span: tt::SpanData::DUMMY, }))], }; let cond = cond.clone(); @@ -446,7 +451,7 @@ fn concat_bytes_expand( } } } - let ident = tt::Ident { text: bytes.join(", ").into(), span: tt::TokenId::unspecified() }; + let ident = tt::Ident { text: bytes.join(", ").into(), span: tt::SpanData::DUMMY }; ExpandResult { value: quote!([#ident]), err } } @@ -494,7 +499,7 @@ fn concat_idents_expand( } } } - let ident = tt::Ident { text: ident.into(), span: tt::TokenId::unspecified() }; + let ident = tt::Ident { text: ident.into(), span: tt::SpanData::DUMMY }; ExpandResult { value: quote!(#ident), err } } @@ -533,15 +538,16 @@ fn include_expand( _tt: &tt::Subtree, ) -> ExpandResult { match db.include_expand(arg_id) { - Ok((res, _)) => ExpandResult::ok(res.0.clone()), + Ok((res, _)) => ExpandResult::ok(res.as_ref().clone()), Err(e) => ExpandResult::new(tt::Subtree::empty(), e), } } +// FIXME: Check if this is still needed now after the token map rewrite pub(crate) fn include_arg_to_tt( db: &dyn ExpandDatabase, arg_id: MacroCallId, -) -> Result<(triomphe::Arc<(::tt::Subtree<::tt::TokenId>, TokenMap)>, FileId), ExpandError> { +) -> Result<(triomphe::Arc, FileId), ExpandError> { let loc = db.lookup_intern_macro_call(arg_id); let Some(EagerCallInfo { arg, arg_id, .. 
}) = loc.eager.as_deref() else { panic!("include_arg_to_tt called on non include macro call: {:?}", &loc.eager); @@ -549,9 +555,12 @@ pub(crate) fn include_arg_to_tt( let path = parse_string(&arg.0)?; let file_id = relative_file(db, *arg_id, &path, false)?; - let (subtree, map) = - parse_to_token_tree(&db.file_text(file_id)).ok_or(mbe::ExpandError::ConversionError)?; - Ok((triomphe::Arc::new((subtree, map)), file_id)) + let subtree = parse_to_token_tree( + &db.file_text(file_id), + SpanAnchor { file_id: file_id.into(), ast_id: ROOT_ERASED_FILE_AST_ID }, + ) + .ok_or(mbe::ExpandError::ConversionError)?; + Ok((triomphe::Arc::new(subtree), file_id)) } fn include_bytes_expand( @@ -568,7 +577,7 @@ fn include_bytes_expand( delimiter: tt::Delimiter::unspecified(), token_trees: vec![tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal { text: r#"b"""#.into(), - span: tt::TokenId::unspecified(), + span: tt::SpanData::DUMMY, }))], }; ExpandResult::ok(res) diff --git a/crates/hir-expand/src/db.rs b/crates/hir-expand/src/db.rs index ff0d279d8cce1..32ba7b2f91155 100644 --- a/crates/hir-expand/src/db.rs +++ b/crates/hir-expand/src/db.rs @@ -1,22 +1,25 @@ //! Defines database & queries for macro expansion. -use base_db::{salsa, CrateId, Edition, SourceDatabase}; +use base_db::{ + salsa, + span::{SpanAnchor, ROOT_ERASED_FILE_AST_ID}, + CrateId, Edition, SourceDatabase, +}; use either::Either; use limit::Limit; -use mbe::{syntax_node_to_token_tree, ValueResult}; -use rustc_hash::FxHashSet; +use mbe::{map_from_syntax_node, syntax_node_to_token_tree, ValueResult}; use syntax::{ ast::{self, HasAttrs, HasDocComments}, - AstNode, GreenNode, Parse, SyntaxError, SyntaxNode, SyntaxToken, T, + AstNode, Parse, SyntaxError, SyntaxNode, SyntaxToken, TextSize, T, }; use triomphe::Arc; use crate::{ ast_id_map::AstIdMap, builtin_attr_macro::pseudo_derive_attr_expansion, - builtin_fn_macro::EagerExpander, fixup, hygiene::HygieneFrame, tt, AstId, BuiltinAttrExpander, + builtin_fn_macro::EagerExpander, hygiene::HygieneFrame, tt, AstId, BuiltinAttrExpander, BuiltinDeriveExpander, BuiltinFnLikeExpander, EagerCallInfo, ExpandError, ExpandResult, ExpandTo, HirFileId, HirFileIdRepr, MacroCallId, MacroCallKind, MacroCallLoc, MacroDefId, - MacroDefKind, MacroFile, ProcMacroExpander, + MacroDefKind, MacroFile, ProcMacroExpander, SpanMap, SyntaxContext, SyntaxContextId, }; /// Total limit on the number of tokens produced by any macro invocation. @@ -30,8 +33,7 @@ static TOKEN_LIMIT: Limit = Limit::new(1_048_576); #[derive(Debug, Clone, Eq, PartialEq)] /// Old-style `macro_rules` or the new macros 2.0 pub struct DeclarativeMacroExpander { - pub mac: mbe::DeclarativeMacro, - pub def_site_token_map: mbe::TokenMap, + pub mac: mbe::DeclarativeMacro, } impl DeclarativeMacroExpander { @@ -41,21 +43,14 @@ impl DeclarativeMacroExpander { tt::Subtree::empty(), ExpandError::other(format!("invalid macro definition: {e}")), ), - None => self.mac.expand(tt).map_err(Into::into), + None => self.mac.expand(&tt).map_err(Into::into), } } - - pub fn map_id_down(&self, token_id: tt::TokenId) -> tt::TokenId { - self.mac.map_id_down(token_id) - } - - pub fn map_id_up(&self, token_id: tt::TokenId) -> (tt::TokenId, mbe::Origin) { - self.mac.map_id_up(token_id) - } } #[derive(Debug, Clone, Eq, PartialEq)] pub enum TokenExpander { + /// Old-style `macro_rules` or the new macros 2.0 DeclarativeMacro(Arc), /// Stuff like `line!` and `file!`. 
    BuiltIn(BuiltinFnLikeExpander),
@@ -69,31 +64,6 @@ pub enum TokenExpander {
     ProcMacro(ProcMacroExpander),
 }
 
-// FIXME: Get rid of these methods
-impl TokenExpander {
-    pub(crate) fn map_id_down(&self, id: tt::TokenId) -> tt::TokenId {
-        match self {
-            TokenExpander::DeclarativeMacro(expander) => expander.map_id_down(id),
-            TokenExpander::BuiltIn(..)
-            | TokenExpander::BuiltInEager(..)
-            | TokenExpander::BuiltInAttr(..)
-            | TokenExpander::BuiltInDerive(..)
-            | TokenExpander::ProcMacro(..) => id,
-        }
-    }
-
-    pub(crate) fn map_id_up(&self, id: tt::TokenId) -> (tt::TokenId, mbe::Origin) {
-        match self {
-            TokenExpander::DeclarativeMacro(expander) => expander.map_id_up(id),
-            TokenExpander::BuiltIn(..)
-            | TokenExpander::BuiltInEager(..)
-            | TokenExpander::BuiltInAttr(..)
-            | TokenExpander::BuiltInDerive(..)
-            | TokenExpander::ProcMacro(..) => (id, mbe::Origin::Call),
-        }
-    }
-}
-
 #[salsa::query_group(ExpandDatabaseStorage)]
 pub trait ExpandDatabase: SourceDatabase {
     fn ast_id_map(&self, file_id: HirFileId) -> Arc<AstIdMap>;
@@ -109,7 +79,7 @@ pub trait ExpandDatabase: SourceDatabase {
     fn parse_macro_expansion(
         &self,
         macro_file: MacroFile,
-    ) -> ExpandResult<(Parse<SyntaxNode>, Arc<mbe::TokenMap>)>;
+    ) -> ExpandResult<(Parse<SyntaxNode>, Arc<SpanMap>)>;
 
     /// Macro ids. That's probably the tricksiest bit in rust-analyzer, and the
     /// reason why we use salsa at all.
@@ -118,23 +88,16 @@ pub trait ExpandDatabase: SourceDatabase {
     /// to be incremental.
     #[salsa::interned]
     fn intern_macro_call(&self, macro_call: MacroCallLoc) -> MacroCallId;
+    #[salsa::interned]
+    fn intern_syntax_context(&self, ctx: SyntaxContext) -> SyntaxContextId;
 
-    /// Lowers syntactic macro call to a token tree representation.
-    #[salsa::transparent]
-    fn macro_arg(
-        &self,
-        id: MacroCallId,
-    ) -> ValueResult<
-        Option<Arc<(tt::Subtree, mbe::TokenMap, fixup::SyntaxFixupUndoInfo)>>,
-        Arc<Box<[SyntaxError]>>,
-    >;
-    /// Extracts syntax node, corresponding to a macro call. That's a firewall
+    /// Lowers syntactic macro call to a token tree representation. That's a firewall
     /// query, only typing in the macro call itself changes the returned
     /// subtree.
-    fn macro_arg_node(
+    fn macro_arg(
        &self,
        id: MacroCallId,
-    ) -> ValueResult<Option<GreenNode>, Arc<Box<[SyntaxError]>>>;
+    ) -> ValueResult<Option<Arc<tt::Subtree>>, Arc<Box<[SyntaxError]>>>;
     /// Fetches the expander for this macro.
     #[salsa::transparent]
     fn macro_expander(&self, id: MacroDefId) -> TokenExpander;
@@ -152,10 +115,7 @@ pub trait ExpandDatabase: SourceDatabase {
     fn include_expand(
         &self,
         arg_id: MacroCallId,
-    ) -> Result<
-        (triomphe::Arc<(::tt::Subtree<::tt::TokenId>, mbe::TokenMap)>, base_db::FileId),
-        ExpandError,
-    >;
+    ) -> Result<(triomphe::Arc<tt::Subtree>, base_db::FileId), ExpandError>;
     /// Special case of the previous query for procedural macros. We can't LRU
     /// proc macros, since they are not deterministic in general, and
     /// non-determinism breaks salsa in a very, very, very bad way.
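// A minimal sketch of the "firewall query" behaviour described above, assuming
// salsa-style memoization (the names below are illustrative, not the real test
// API): `macro_arg` may re-run after an unrelated edit, but salsa compares the
// fresh subtree with the cached one and only invalidates dependent queries when
// the call's own token tree actually changed. Proc macros are excluded from this
// kind of caching precisely because two runs may not produce equal output.
//
//     let before = db.macro_arg(id);            // cached Arc<tt::Subtree>
//     edit_file_outside_macro_call(&mut db);    // hypothetical helper
//     let after = db.macro_arg(id);             // recomputed, but after == before,
//                                               // so parse_macro_expansion is reused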
@@ -181,21 +141,19 @@ pub fn expand_speculative( token_to_map: SyntaxToken, ) -> Option<(SyntaxNode, SyntaxToken)> { let loc = db.lookup_intern_macro_call(actual_macro_call); - let token_range = token_to_map.text_range(); + let file_id = loc.kind.file_id(); // Build the subtree and token mapping for the speculative args - let censor = censor_for_macro_input(&loc, speculative_args); - let mut fixups = fixup::fixup_syntax(speculative_args); - fixups.replace.extend(censor.into_iter().map(|node| (node.into(), Vec::new()))); - let (mut tt, spec_args_tmap, _) = mbe::syntax_node_to_token_tree_with_modifications( + let _censor = censor_for_macro_input(&loc, speculative_args); + let mut tt = mbe::syntax_node_to_token_tree( speculative_args, - fixups.token_map, - fixups.next_id, - fixups.replace, - fixups.append, + // we don't leak these spans into any query so its fine to make them absolute + SpanAnchor { file_id, ast_id: ROOT_ERASED_FILE_AST_ID }, + TextSize::new(0), + &Default::default(), ); - let (attr_arg, token_id) = match loc.kind { + let attr_arg = match loc.kind { MacroCallKind::Attr { invoc_attr_index, .. } => { let attr = if loc.def.is_attribute_derive() { // for pseudo-derive expansion we actually pass the attribute itself only @@ -210,48 +168,27 @@ pub fn expand_speculative( }?; match attr.token_tree() { Some(token_tree) => { - let (mut tree, map) = syntax_node_to_token_tree(attr.token_tree()?.syntax()); - tree.delimiter = tt::Delimiter::unspecified(); - - let shift = mbe::Shift::new(&tt); - shift.shift_all(&mut tree); - - let token_id = if token_tree.syntax().text_range().contains_range(token_range) { - let attr_input_start = - token_tree.left_delimiter_token()?.text_range().start(); - let range = token_range.checked_sub(attr_input_start)?; - let token_id = shift.shift(map.token_by_range(range)?); - Some(token_id) - } else { - None - }; - (Some(tree), token_id) - } - _ => (None, None), - } - } - _ => (None, None), - }; - let token_id = match token_id { - Some(token_id) => token_id, - // token wasn't inside an attribute input so it has to be in the general macro input - None => { - let range = token_range.checked_sub(speculative_args.text_range().start())?; - let token_id = spec_args_tmap.token_by_range(range)?; - match loc.def.kind { - MacroDefKind::Declarative(it) => { - db.decl_macro_expander(loc.krate, it).map_id_down(token_id) + let mut tree = syntax_node_to_token_tree( + token_tree.syntax(), + SpanAnchor { file_id, ast_id: ROOT_ERASED_FILE_AST_ID }, + TextSize::new(0), + &Default::default(), + ); + tree.delimiter = tt::Delimiter::UNSPECIFIED; + + Some(tree) } - _ => token_id, + _ => None, } } + _ => None, }; // Do the actual expansion, we need to directly expand the proc macro due to the attribute args // Otherwise the expand query will fetch the non speculative attribute args and pass those instead. - let mut speculative_expansion = match loc.def.kind { + let speculative_expansion = match loc.def.kind { MacroDefKind::ProcMacro(expander, ..) => { - tt.delimiter = tt::Delimiter::unspecified(); + tt.delimiter = tt::Delimiter::UNSPECIFIED; expander.expand(db, loc.def.krate, loc.krate, &tt, attr_arg.as_ref()) } MacroDefKind::BuiltInAttr(BuiltinAttrExpander::Derive, _) => { @@ -260,7 +197,17 @@ pub fn expand_speculative( MacroDefKind::BuiltInDerive(expander, ..) => { // this cast is a bit sus, can we avoid losing the typedness here? 
let adt = ast::Adt::cast(speculative_args.clone()).unwrap(); - expander.expand(db, actual_macro_call, &adt, &spec_args_tmap) + expander.expand( + db, + actual_macro_call, + &adt, + &map_from_syntax_node( + speculative_args, + // we don't leak these spans into any query so its fine to make them absolute + SpanAnchor { file_id, ast_id: ROOT_ERASED_FILE_AST_ID }, + TextSize::new(0), + ), + ) } MacroDefKind::Declarative(it) => db.decl_macro_expander(loc.krate, it).expand(tt), MacroDefKind::BuiltIn(it, _) => it.expand(db, actual_macro_call, &tt).map_err(Into::into), @@ -271,12 +218,14 @@ pub fn expand_speculative( }; let expand_to = macro_expand_to(db, actual_macro_call); - fixup::reverse_fixups(&mut speculative_expansion.value, &spec_args_tmap, &fixups.undo_info); - let (node, rev_tmap) = token_tree_to_syntax_node(&speculative_expansion.value, expand_to); + let (node, rev_tmap) = token_tree_to_syntax_node(db, &speculative_expansion.value, expand_to); let syntax_node = node.syntax_node(); let token = rev_tmap - .ranges_by_token(token_id, token_to_map.kind()) + .ranges_with_span(tt::SpanData { + range: token_to_map.text_range(), + anchor: SpanAnchor { file_id, ast_id: ROOT_ERASED_FILE_AST_ID }, + }) .filter_map(|range| syntax_node.covering_element(range).into_token()) .min_by_key(|t| { // prefer tokens of the same kind and text @@ -293,7 +242,7 @@ fn ast_id_map(db: &dyn ExpandDatabase, file_id: HirFileId) -> Arc { fn parse_or_expand(db: &dyn ExpandDatabase, file_id: HirFileId) -> SyntaxNode { match file_id.repr() { - HirFileIdRepr::FileId(file_id) => db.parse(file_id).tree().syntax().clone(), + HirFileIdRepr::FileId(file_id) => db.parse(file_id).syntax_node(), HirFileIdRepr::MacroFile(macro_file) => { db.parse_macro_expansion(macro_file).value.0.syntax_node() } @@ -315,7 +264,7 @@ fn parse_or_expand_with_err( fn parse_macro_expansion( db: &dyn ExpandDatabase, macro_file: MacroFile, -) -> ExpandResult<(Parse, Arc)> { +) -> ExpandResult<(Parse, Arc)> { let _p = profile::span("parse_macro_expansion"); let mbe::ValueResult { value: tt, err } = db.macro_expand(macro_file.macro_call_id); @@ -324,7 +273,7 @@ fn parse_macro_expansion( tracing::debug!("expanded = {}", tt.as_debug_string()); tracing::debug!("kind = {:?}", expand_to); - let (parse, rev_token_map) = token_tree_to_syntax_node(&tt, expand_to); + let (parse, rev_token_map) = token_tree_to_syntax_node(db, &tt, expand_to); ExpandResult { value: (parse, Arc::new(rev_token_map)), err } } @@ -340,48 +289,119 @@ fn parse_macro_expansion_error( fn macro_arg( db: &dyn ExpandDatabase, id: MacroCallId, -) -> ValueResult< - Option>, - Arc>, -> { - let loc = db.lookup_intern_macro_call(id); - - if let Some(EagerCallInfo { arg, arg_id: _, error: _ }) = loc.eager.as_deref() { - return ValueResult::ok(Some(Arc::new((arg.0.clone(), arg.1.clone(), Default::default())))); - } - - let ValueResult { value, err } = db.macro_arg_node(id); - let Some(arg) = value else { - return ValueResult { value: None, err }; +) -> ValueResult>, Arc>> { + let mismatched_delimiters = |arg: &SyntaxNode| { + let first = arg.first_child_or_token().map_or(T![.], |it| it.kind()); + let last = arg.last_child_or_token().map_or(T![.], |it| it.kind()); + let well_formed_tt = + matches!((first, last), (T!['('], T![')']) | (T!['['], T![']']) | (T!['{'], T!['}'])); + if !well_formed_tt { + // Don't expand malformed (unbalanced) macro invocations. 
This is + // less than ideal, but trying to expand unbalanced macro calls + // sometimes produces pathological, deeply nested code which breaks + // all kinds of things. + // + // Some day, we'll have explicit recursion counters for all + // recursive things, at which point this code might be removed. + cov_mark::hit!(issue9358_bad_macro_stack_overflow); + Some(Arc::new(Box::new([SyntaxError::new( + "unbalanced token tree".to_owned(), + arg.text_range(), + )]) as Box<[_]>)) + } else { + None + } }; + let loc = db.lookup_intern_macro_call(id); + if let Some(EagerCallInfo { arg, .. }) = matches!(loc.def.kind, MacroDefKind::BuiltInEager(..)) + .then(|| loc.eager.as_deref()) + .flatten() + { + ValueResult::ok(Some(Arc::new(arg.0.clone()))) + } else { + let (parse, map) = match loc.kind.file_id().repr() { + HirFileIdRepr::FileId(file_id) => { + (db.parse(file_id).to_syntax(), Arc::new(Default::default())) + } + HirFileIdRepr::MacroFile(macro_file) => { + let (parse, map) = db.parse_macro_expansion(macro_file).value; + (parse, map) + } + }; + let root = parse.syntax_node(); + + let (syntax, offset, ast_id) = match loc.kind { + MacroCallKind::FnLike { ast_id, .. } => { + let node = &ast_id.to_ptr(db).to_node(&root); + let offset = node.syntax().text_range().start(); + match node.token_tree().map(|it| it.syntax().clone()) { + Some(tt) => { + if let Some(e) = mismatched_delimiters(&tt) { + return ValueResult::only_err(e); + } + (tt, offset, ast_id.value.erase()) + } + None => { + return ValueResult::only_err(Arc::new(Box::new([ + SyntaxError::new_at_offset("missing token tree".to_owned(), offset), + ]))); + } + } + } + MacroCallKind::Derive { ast_id, .. } => { + let syntax_node = ast_id.to_ptr(db).to_node(&root).syntax().clone(); + let offset = syntax_node.text_range().start(); + (syntax_node, offset, ast_id.value.erase()) + } + MacroCallKind::Attr { ast_id, .. 
} => { + let syntax_node = ast_id.to_ptr(db).to_node(&root).syntax().clone(); + let offset = syntax_node.text_range().start(); + (syntax_node, offset, ast_id.value.erase()) + } + }; + let censor = censor_for_macro_input(&loc, &syntax); + // let mut fixups = fixup::fixup_syntax(&node); + // fixups.replace.extend(censor.into_iter().map(|node| (node.into(), Vec::new()))); + // let (mut tt, tmap, _) = mbe::syntax_node_to_token_tree_with_modifications( + // &node, + // fixups.token_map, + // fixups.next_id, + // fixups.replace, + // fixups.append, + // ); + let mut tt = mbe::syntax_node_to_token_tree_censored( + &syntax, + SpanAnchor { file_id: loc.kind.file_id(), ast_id }, + offset, + &map, + censor, + ); + + if loc.def.is_proc_macro() { + // proc macros expect their inputs without parentheses, MBEs expect it with them included + tt.delimiter = tt::Delimiter::UNSPECIFIED; + } - let node = SyntaxNode::new_root(arg); - let censor = censor_for_macro_input(&loc, &node); - let mut fixups = fixup::fixup_syntax(&node); - fixups.replace.extend(censor.into_iter().map(|node| (node.into(), Vec::new()))); - let (mut tt, tmap, _) = mbe::syntax_node_to_token_tree_with_modifications( - &node, - fixups.token_map, - fixups.next_id, - fixups.replace, - fixups.append, - ); - - if loc.def.is_proc_macro() { - // proc macros expect their inputs without parentheses, MBEs expect it with them included - tt.delimiter = tt::Delimiter::unspecified(); - } - let val = Some(Arc::new((tt, tmap, fixups.undo_info))); - match err { - Some(err) => ValueResult::new(val, err), - None => ValueResult::ok(val), + if matches!(loc.def.kind, MacroDefKind::BuiltInEager(..)) { + match parse.errors() { + [] => ValueResult::ok(Some(Arc::new(tt))), + errors => ValueResult::new( + Some(Arc::new(tt)), + // Box::<[_]>::from(res.errors()), not stable yet + Arc::new(errors.to_vec().into_boxed_slice()), + ), + } + } else { + ValueResult::ok(Some(Arc::new(tt))) + } } } +// FIXME: Censoring info should be calculated by the caller! Namely by name resolution /// Certain macro calls expect some nodes in the input to be preprocessed away, namely: /// - derives expect all `#[derive(..)]` invocations up to the currently invoked one to be stripped /// - attributes expect the invoking attribute to be stripped -fn censor_for_macro_input(loc: &MacroCallLoc, node: &SyntaxNode) -> FxHashSet { +fn censor_for_macro_input(loc: &MacroCallLoc, node: &SyntaxNode) -> Vec { // FIXME: handle `cfg_attr` (|| { let censor = match loc.kind { @@ -417,103 +437,56 @@ fn censor_for_macro_input(loc: &MacroCallLoc, node: &SyntaxNode) -> FxHashSet ValueResult, Arc>> { - let err = || -> Arc> { - Arc::new(Box::new([SyntaxError::new_at_offset( - "invalid macro call".to_owned(), - syntax::TextSize::from(0), - )])) - }; - let loc = db.lookup_intern_macro_call(id); - let arg = if let MacroDefKind::BuiltInEager(..) = loc.def.kind { - let res = if let Some(EagerCallInfo { arg, .. 
}) = loc.eager.as_deref() { - Some(mbe::token_tree_to_syntax_node(&arg.0, mbe::TopEntryPoint::MacroEagerInput).0) - } else { - loc.kind - .arg(db) - .and_then(|arg| ast::TokenTree::cast(arg.value)) - .map(|tt| tt.reparse_as_comma_separated_expr().to_syntax()) - }; - match res { - Some(res) if res.errors().is_empty() => res.syntax_node(), - Some(res) => { - return ValueResult::new( - Some(res.syntax_node().green().into()), - // Box::<[_]>::from(res.errors()), not stable yet - Arc::new(res.errors().to_vec().into_boxed_slice()), - ); - } - None => return ValueResult::only_err(err()), - } - } else { - match loc.kind.arg(db) { - Some(res) => res.value, - None => return ValueResult::only_err(err()), - } - }; - if matches!(loc.kind, MacroCallKind::FnLike { .. }) { - let first = arg.first_child_or_token().map_or(T![.], |it| it.kind()); - let last = arg.last_child_or_token().map_or(T![.], |it| it.kind()); - let well_formed_tt = - matches!((first, last), (T!['('], T![')']) | (T!['['], T![']']) | (T!['{'], T!['}'])); - if !well_formed_tt { - // Don't expand malformed (unbalanced) macro invocations. This is - // less than ideal, but trying to expand unbalanced macro calls - // sometimes produces pathological, deeply nested code which breaks - // all kinds of things. - // - // Some day, we'll have explicit recursion counters for all - // recursive things, at which point this code might be removed. - cov_mark::hit!(issue9358_bad_macro_stack_overflow); - return ValueResult::only_err(Arc::new(Box::new([SyntaxError::new( - "unbalanced token tree".to_owned(), - arg.text_range(), - )]))); - } - } - ValueResult::ok(Some(arg.green().into())) -} - fn decl_macro_expander( db: &dyn ExpandDatabase, def_crate: CrateId, id: AstId, ) -> Arc { let is_2021 = db.crate_graph()[def_crate].edition >= Edition::Edition2021; - let (mac, def_site_token_map) = match id.to_node(db) { + let (root, map) = match id.file_id.repr() { + HirFileIdRepr::FileId(file_id) => { + (db.parse(file_id).syntax_node(), Arc::new(Default::default())) + } + HirFileIdRepr::MacroFile(macro_file) => { + let (parse, map) = db.parse_macro_expansion(macro_file).value; + (parse.syntax_node(), map) + } + }; + let mac = match id.to_ptr(db).to_node(&root) { ast::Macro::MacroRules(macro_rules) => match macro_rules.token_tree() { Some(arg) => { - let (tt, def_site_token_map) = mbe::syntax_node_to_token_tree(arg.syntax()); + let tt = mbe::syntax_node_to_token_tree( + arg.syntax(), + SpanAnchor { file_id: id.file_id, ast_id: id.value.erase() }, + macro_rules.syntax().text_range().start(), + &map, + ); let mac = mbe::DeclarativeMacro::parse_macro_rules(&tt, is_2021); - (mac, def_site_token_map) + mac } - None => ( - mbe::DeclarativeMacro::from_err( - mbe::ParseError::Expected("expected a token tree".into()), - is_2021, - ), - Default::default(), + None => mbe::DeclarativeMacro::from_err( + mbe::ParseError::Expected("expected a token tree".into()), + is_2021, ), }, ast::Macro::MacroDef(macro_def) => match macro_def.body() { Some(arg) => { - let (tt, def_site_token_map) = mbe::syntax_node_to_token_tree(arg.syntax()); + let tt = mbe::syntax_node_to_token_tree( + arg.syntax(), + SpanAnchor { file_id: id.file_id, ast_id: id.value.erase() }, + macro_def.syntax().text_range().start(), + &map, + ); let mac = mbe::DeclarativeMacro::parse_macro2(&tt, is_2021); - (mac, def_site_token_map) + mac } - None => ( - mbe::DeclarativeMacro::from_err( - mbe::ParseError::Expected("expected a token tree".into()), - is_2021, - ), - Default::default(), + None => 
mbe::DeclarativeMacro::from_err( + mbe::ParseError::Expected("expected a token tree".into()), + is_2021, ), }, }; - Arc::new(DeclarativeMacroExpander { mac, def_site_token_map }) + Arc::new(DeclarativeMacroExpander { mac }) } fn macro_expander(db: &dyn ExpandDatabase, id: MacroDefId) -> TokenExpander { @@ -536,25 +509,37 @@ fn macro_expand(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult return db.expand_proc_macro(id), MacroDefKind::BuiltInDerive(expander, ..) => { - let arg = db.macro_arg_node(id).value.unwrap(); - - let node = SyntaxNode::new_root(arg); - let censor = censor_for_macro_input(&loc, &node); - let mut fixups = fixup::fixup_syntax(&node); - fixups.replace.extend(censor.into_iter().map(|node| (node.into(), Vec::new()))); - let (tmap, _) = mbe::syntax_node_to_token_map_with_modifications( + // FIXME: add firewall query for this? + let hir_file_id = loc.kind.file_id(); + let (root, map) = match hir_file_id.repr() { + HirFileIdRepr::FileId(file_id) => (db.parse(file_id).syntax_node(), None), + HirFileIdRepr::MacroFile(macro_file) => { + let (parse, map) = db.parse_macro_expansion(macro_file).value; + (parse.syntax_node(), Some(map)) + } + }; + let MacroCallKind::Derive { ast_id, .. } = loc.kind else { unreachable!() }; + let node = ast_id.to_ptr(db).to_node(&root); + + // FIXME: we might need to remove the spans from the input to the derive macro here + let _censor = censor_for_macro_input(&loc, node.syntax()); + let _t; + expander.expand( + db, + id, &node, - fixups.token_map, - fixups.next_id, - fixups.replace, - fixups.append, - ); - - // this cast is a bit sus, can we avoid losing the typedness here? - let adt = ast::Adt::cast(node).unwrap(); - let mut res = expander.expand(db, id, &adt, &tmap); - fixup::reverse_fixups(&mut res.value, &tmap, &fixups.undo_info); - res + match &map { + Some(map) => map, + None => { + _t = map_from_syntax_node( + node.syntax(), + SpanAnchor { file_id: hir_file_id, ast_id: ast_id.value.erase() }, + node.syntax().text_range().start(), + ); + &_t + } + }, + ) } _ => { let ValueResult { value, err } = db.macro_arg(id); @@ -570,8 +555,8 @@ fn macro_expand(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult { db.decl_macro_expander(loc.def.krate, id).expand(arg.clone()) } @@ -583,11 +568,8 @@ fn macro_expand(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult { - let mut arg = arg.clone(); - fixup::reverse_fixups(&mut arg, arg_tm, undo_info); - return ExpandResult { - value: Arc::new(arg), + value: Arc::new(arg.clone()), err: err.map(|err| { let mut buf = String::new(); for err in &**err { @@ -603,9 +585,7 @@ fn macro_expand(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult it.expand(db, id, &arg).map_err(Into::into), MacroDefKind::BuiltInAttr(it, _) => it.expand(db, id, &arg), _ => unreachable!(), - }; - fixup::reverse_fixups(&mut res.value, arg_tm, undo_info); - res + } } }; @@ -626,6 +606,7 @@ fn macro_expand(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult ExpandResult> { + // FIXME: Syntax fix ups let loc = db.lookup_intern_macro_call(id); let Some(macro_arg) = db.macro_arg(id).value else { return ExpandResult { @@ -639,32 +620,24 @@ fn expand_proc_macro(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult expander, _ => unreachable!(), }; let attr_arg = match &loc.kind { - MacroCallKind::Attr { attr_args, .. } => { - let mut attr_args = attr_args.0.clone(); - mbe::Shift::new(arg_tt).shift_all(&mut attr_args); - Some(attr_args) - } + MacroCallKind::Attr { attr_args, .. 
} => Some(&**attr_args),
         _ => None,
     };
 
-    let ExpandResult { value: mut tt, err } =
-        expander.expand(db, loc.def.krate, loc.krate, arg_tt, attr_arg.as_ref());
+    let ExpandResult { value: tt, err } =
+        expander.expand(db, loc.def.krate, loc.krate, &macro_arg, attr_arg);
 
     // Set a hard limit for the expanded tt
     if let Err(value) = check_tt_count(&tt) {
         return value;
     }
 
-    fixup::reverse_fixups(&mut tt, arg_tm, undo_info);
-
     ExpandResult { value: Arc::new(tt), err }
 }
 
@@ -677,9 +650,10 @@ fn macro_expand_to(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandTo {
 }
 
 fn token_tree_to_syntax_node(
+    db: &dyn ExpandDatabase,
     tt: &tt::Subtree,
     expand_to: ExpandTo,
-) -> (Parse<SyntaxNode>, mbe::TokenMap) {
+) -> (Parse<SyntaxNode>, SpanMap) {
     let entry_point = match expand_to {
         ExpandTo::Statements => mbe::TopEntryPoint::MacroStmts,
         ExpandTo::Items => mbe::TopEntryPoint::MacroItems,
@@ -687,7 +661,18 @@ fn token_tree_to_syntax_node(
         ExpandTo::Type => mbe::TopEntryPoint::Type,
         ExpandTo::Expr => mbe::TopEntryPoint::Expr,
     };
-    mbe::token_tree_to_syntax_node(tt, entry_point)
+    let mut tm = mbe::token_tree_to_syntax_node(tt, entry_point);
+    // FIXME: sorting the span map by anchor file and the anchor node's offset; clarify why
+    tm.1.span_map.sort_by(|(_, a), (_, b)| {
+        a.anchor.file_id.cmp(&b.anchor.file_id).then_with(|| {
+            let map = db.ast_id_map(a.anchor.file_id);
+            map.get_raw(a.anchor.ast_id)
+                .text_range()
+                .start()
+                .cmp(&map.get_raw(b.anchor.ast_id).text_range().start())
+        })
+    });
+    tm
 }
 
 fn check_tt_count(tt: &tt::Subtree) -> Result<(), ExpandResult<Arc<tt::Subtree>>> {
diff --git a/crates/hir-expand/src/eager.rs b/crates/hir-expand/src/eager.rs
index 4110f2847592d..ae5f26b5d36a2 100644
--- a/crates/hir-expand/src/eager.rs
+++ b/crates/hir-expand/src/eager.rs
@@ -18,8 +18,11 @@
 //!
 //!
 //! See the full discussion :
-use base_db::CrateId;
-use rustc_hash::{FxHashMap, FxHashSet};
+use base_db::{
+    span::{SpanAnchor, ROOT_ERASED_FILE_AST_ID},
+    CrateId,
+};
+use rustc_hash::FxHashMap;
 use syntax::{ted, Parse, SyntaxNode, TextRange, TextSize, WalkEvent};
 use triomphe::Arc;
 
@@ -29,7 +32,7 @@ use crate::{
     hygiene::Hygiene,
     mod_path::ModPath,
     EagerCallInfo, ExpandError, ExpandResult, ExpandTo, InFile, MacroCallId, MacroCallKind,
-    MacroCallLoc, MacroDefId, MacroDefKind,
+    MacroCallLoc, MacroDefId, MacroDefKind, SpanMap,
 };
 
 pub fn expand_eager_macro_input(
@@ -54,15 +57,15 @@ pub fn expand_eager_macro_input(
         eager: None,
         kind: MacroCallKind::FnLike { ast_id: call_id, expand_to: ExpandTo::Expr },
     });
-    let ExpandResult { value: (arg_exp, arg_exp_map), err: parse_err } =
+    let ExpandResult { value: (arg_exp, _arg_exp_map), err: parse_err } =
         db.parse_macro_expansion(arg_id.as_macro_file());
 
     // we need this map here as the expansion of the eager input fake file loses whitespace ...
- let mut ws_mapping = FxHashMap::default(); - if let Some((_, tm, _)) = db.macro_arg(arg_id).value.as_deref() { - ws_mapping.extend(tm.entries().filter_map(|(id, range)| { - Some((arg_exp_map.first_range_by_token(id, syntax::SyntaxKind::TOMBSTONE)?, range)) - })); - } + // let mut ws_mapping = FxHashMap::default(); + // if let Some((tm)) = db.macro_arg(arg_id).value.as_deref() { + // ws_mapping.extend(tm.entries().filter_map(|(id, range)| { + // Some((arg_exp_map.first_range_by_token(id, syntax::SyntaxKind::TOMBSTONE)?, range)) + // })); + // } let ExpandResult { value: expanded_eager_input, err } = { eager_macro_recur( @@ -75,49 +78,55 @@ pub fn expand_eager_macro_input( }; let err = parse_err.or(err); - let Some((expanded_eager_input, mapping)) = expanded_eager_input else { + let Some((expanded_eager_input, _mapping)) = expanded_eager_input else { return ExpandResult { value: None, err }; }; - let (mut subtree, expanded_eager_input_token_map) = - mbe::syntax_node_to_token_tree(&expanded_eager_input); + let mut subtree = mbe::syntax_node_to_token_tree( + &expanded_eager_input, + // is this right? + SpanAnchor { file_id: arg_id.as_file(), ast_id: ROOT_ERASED_FILE_AST_ID }, + TextSize::new(0), + // FIXME: Spans! `eager_macro_recur` needs to fill out a span map for us + &Default::default(), + ); - let og_tmap = if let Some(tt) = macro_call.value.token_tree() { - let mut ids_used = FxHashSet::default(); - let mut og_tmap = mbe::syntax_node_to_token_map(tt.syntax()); - // The tokenmap and ids of subtree point into the expanded syntax node, but that is inaccessible from the outside - // so we need to remap them to the original input of the eager macro. - subtree.visit_ids(&mut |id| { - // Note: we discard all token ids of braces and the like here, but that's not too bad and only a temporary fix + // let og_tmap = if let Some(tt) = macro_call.value.token_tree() { + // let mut ids_used = FxHashSet::default(); + // let mut og_tmap = mbe::syntax_node_to_token_map(tt.syntax()); + // // The tokenmap and ids of subtree point into the expanded syntax node, but that is inaccessible from the outside + // // so we need to remap them to the original input of the eager macro. 
+    // subtree.visit_ids(&mut |id| {
+    //     // Note: we discard all token ids of braces and the like here, but that's not too bad and only a temporary fix
 
-        if let Some(range) = expanded_eager_input_token_map
-            .first_range_by_token(id, syntax::SyntaxKind::TOMBSTONE)
-        {
-            // remap from expanded eager input to eager input expansion
-            if let Some(og_range) = mapping.get(&range) {
-                // remap from eager input expansion to original eager input
-                if let Some(&og_range) = ws_mapping.get(og_range) {
-                    if let Some(og_token) = og_tmap.token_by_range(og_range) {
-                        ids_used.insert(og_token);
-                        return og_token;
-                    }
-                }
-            }
-        }
-        tt::TokenId::UNSPECIFIED
-    });
-    og_tmap.filter(|id| ids_used.contains(&id));
-    og_tmap
-    } else {
-        Default::default()
-    };
-    subtree.delimiter = crate::tt::Delimiter::unspecified();
+    //     if let Some(range) = expanded_eager_input_token_map
+    //         .first_range_by_token(id, syntax::SyntaxKind::TOMBSTONE)
+    //     {
+    //         // remap from expanded eager input to eager input expansion
+    //         if let Some(og_range) = mapping.get(&range) {
+    //             // remap from eager input expansion to original eager input
+    //             if let Some(&og_range) = ws_mapping.get(og_range) {
+    //                 if let Some(og_token) = og_tmap.token_by_range(og_range) {
+    //                     ids_used.insert(og_token);
+    //                     return og_token;
+    //                 }
+    //             }
+    //         }
+    //     }
+    //     tt::TokenId::UNSPECIFIED
+    // });
+    // og_tmap.filter(|id| ids_used.contains(&id));
+    // og_tmap
+    // } else {
+    //     Default::default()
+    // };
+    subtree.delimiter = crate::tt::Delimiter::UNSPECIFIED;
 
     let loc = MacroCallLoc {
         def,
         krate,
         eager: Some(Box::new(EagerCallInfo {
-            arg: Arc::new((subtree, og_tmap)),
+            arg: Arc::new((subtree,)),
             arg_id,
             error: err.clone(),
         })),
@@ -132,7 +141,7 @@ fn lazy_expand(
     def: &MacroDefId,
     macro_call: InFile<ast::MacroCall>,
     krate: CrateId,
-) -> ExpandResult<(InFile<Parse<SyntaxNode>>, Arc<TokenMap>)> {
+) -> ExpandResult<(InFile<Parse<SyntaxNode>>, Arc<SpanMap>)> {
     let ast_id = db.ast_id_map(macro_call.file_id).ast_id(&macro_call.value);
 
     let expand_to = ExpandTo::from_call_site(&macro_call.value);
@@ -214,19 +223,19 @@ fn eager_macro_recur(
                 let ExpandResult { value, err: err2 } =
                     db.parse_macro_expansion(call_id.as_macro_file());
 
-                if let Some(tt) = call.token_tree() {
-                    let call_tt_start = tt.syntax().text_range().start();
-                    let call_start =
-                        apply_offset(call.syntax().text_range().start(), offset);
-                    if let Some((_, arg_map, _)) = db.macro_arg(call_id).value.as_deref() {
-                        mapping.extend(arg_map.entries().filter_map(|(tid, range)| {
-                            value
-                                .1
-                                .first_range_by_token(tid, syntax::SyntaxKind::TOMBSTONE)
-                                .map(|r| (r + call_start, range + call_tt_start))
-                        }));
-                    }
-                }
+                // if let Some(tt) = call.token_tree() {
+                //     let call_tt_start = tt.syntax().text_range().start();
+                //     let call_start =
+                //         apply_offset(call.syntax().text_range().start(), offset);
+                //     if let Some((_, arg_map, _)) = db.macro_arg(call_id).value.as_deref() {
+                //         mapping.extend(arg_map.entries().filter_map(|(tid, range)| {
+                //             value
+                //                 .1
+                //                 .first_range_by_token(tid, syntax::SyntaxKind::TOMBSTONE)
+                //                 .map(|r| (r + call_start, range + call_tt_start))
+                //         }));
+                //     }
+                // }
 
                 ExpandResult {
                     value: Some(value.0.syntax_node().clone_for_update()),
@@ -241,13 +250,8 @@
             | MacroDefKind::BuiltInAttr(..)
             | MacroDefKind::BuiltInDerive(..)
             | MacroDefKind::ProcMacro(..)
=> {
-                let ExpandResult { value: (parse, tm), err } =
+                let ExpandResult { value: (parse, _tm), err } =
                     lazy_expand(db, &def, curr.with_value(call.clone()), krate);
-                let decl_mac = if let MacroDefKind::Declarative(ast_id) = def.kind {
-                    Some(db.decl_macro_expander(def.krate, ast_id))
-                } else {
-                    None
-                };
 
                 // replace macro inside
                 let hygiene = Hygiene::new(db, parse.file_id);
@@ -261,24 +265,29 @@
                 );
                 let err = err.or(error);
 
-                if let Some(tt) = call.token_tree() {
-                    let call_tt_start = tt.syntax().text_range().start();
-                    let call_start = apply_offset(call.syntax().text_range().start(), offset);
-                    if let Some((_tt, arg_map, _)) = parse
-                        .file_id
-                        .macro_file()
-                        .and_then(|id| db.macro_arg(id.macro_call_id).value)
-                        .as_deref()
-                    {
-                        mapping.extend(arg_map.entries().filter_map(|(tid, range)| {
-                            tm.first_range_by_token(
-                                decl_mac.as_ref().map(|it| it.map_id_down(tid)).unwrap_or(tid),
-                                syntax::SyntaxKind::TOMBSTONE,
-                            )
-                            .map(|r| (r + call_start, range + call_tt_start))
-                        }));
-                    }
-                }
+                // if let Some(tt) = call.token_tree() {
+                //     let decl_mac = if let MacroDefKind::Declarative(ast_id) = def.kind {
+                //         Some(db.decl_macro_expander(def.krate, ast_id))
+                //     } else {
+                //         None
+                //     };
+                //     let call_tt_start = tt.syntax().text_range().start();
+                //     let call_start = apply_offset(call.syntax().text_range().start(), offset);
+                //     if let Some((_tt, arg_map, _)) = parse
+                //         .file_id
+                //         .macro_file()
+                //         .and_then(|id| db.macro_arg(id.macro_call_id).value)
+                //         .as_deref()
+                //     {
+                //         mapping.extend(arg_map.entries().filter_map(|(tid, range)| {
+                //             tm.first_range_by_token(
+                //                 decl_mac.as_ref().map(|it| it.map_id_down(tid)).unwrap_or(tid),
+                //                 syntax::SyntaxKind::TOMBSTONE,
+                //             )
+                //             .map(|r| (r + call_start, range + call_tt_start))
+                //         }));
+                //     }
+                // }
                 // FIXME: Do we need to re-use _m here?
                 ExpandResult { value: value.map(|(n, _m)| n), err }
             }
diff --git a/crates/hir-expand/src/hygiene.rs b/crates/hir-expand/src/hygiene.rs
index ca65db1136ce1..ce421d3dcd825 100644
--- a/crates/hir-expand/src/hygiene.rs
+++ b/crates/hir-expand/src/hygiene.rs
@@ -5,18 +5,16 @@
 use base_db::CrateId;
 use db::TokenExpander;
 use either::Either;
-use mbe::Origin;
 use syntax::{
     ast::{self, HasDocComments},
-    AstNode, SyntaxKind, SyntaxNode, TextRange, TextSize,
+    AstNode, SyntaxNode, TextRange, TextSize,
 };
 use triomphe::Arc;
 
 use crate::{
     db::{self, ExpandDatabase},
-    fixup,
     name::{AsName, Name},
-    HirFileId, InFile, MacroCallKind, MacroCallLoc, MacroDefKind, MacroFile,
+    HirFileId, InFile, MacroCallKind, MacroCallLoc, MacroDefKind, MacroFile, SpanMap,
 };
 
 #[derive(Clone, Debug)]
@@ -50,23 +48,25 @@ impl Hygiene {
         Either::Left(name_ref.as_name())
     }
 
-    pub fn local_inner_macros(&self, db: &dyn ExpandDatabase, path: ast::Path) -> Option<CrateId> {
-        let mut token = path.syntax().first_token()?.text_range();
+    pub fn local_inner_macros(&self, _db: &dyn ExpandDatabase, path: ast::Path) -> Option<CrateId> {
+        let mut _token = path.syntax().first_token()?.text_range();
         let frames = self.frames.as_ref()?;
-        let mut current = &frames.0;
-
-        loop {
-            let (mapped, origin) = current.expansion.as_ref()?.map_ident_up(db, token)?;
-            if origin == Origin::Def {
-                return if current.local_inner {
-                    frames.root_crate(db, path.syntax())
-                } else {
-                    None
-                };
-            }
-            current = current.call_site.as_ref()?;
-            token = mapped.value;
-        }
+        let mut _current = &frames.0;
+
+        // FIXME: Hygiene ...
+        return None;
+        // loop {
+        //     let (mapped, origin) = current.expansion.as_ref()?.map_ident_up(db, token)?;
+        //     if origin == Origin::Def {
+        //         return if current.local_inner {
+        //             frames.root_crate(db, path.syntax())
+        //         } else {
+        //             None
+        //         };
+        //     }
+        //     current = current.call_site.as_ref()?;
+        //     token = mapped.value;
+        // }
     }
 }
 
@@ -92,31 +92,33 @@ impl HygieneFrames {
         HygieneFrames(Arc::new(HygieneFrame::new(db, file_id)))
     }
 
-    fn root_crate(&self, db: &dyn ExpandDatabase, node: &SyntaxNode) -> Option<CrateId> {
-        let mut token = node.first_token()?.text_range();
-        let mut result = self.0.krate;
-        let mut current = self.0.clone();
+    fn root_crate(&self, _db: &dyn ExpandDatabase, node: &SyntaxNode) -> Option<CrateId> {
+        let mut _token = node.first_token()?.text_range();
+        let mut _result = self.0.krate;
+        let mut _current = self.0.clone();
 
-        while let Some((mapped, origin)) =
-            current.expansion.as_ref().and_then(|it| it.map_ident_up(db, token))
-        {
-            result = current.krate;
+        return None;
 
-            let site = match origin {
-                Origin::Def => &current.def_site,
-                Origin::Call => &current.call_site,
-            };
+        // while let Some((mapped, origin)) =
+        //     current.expansion.as_ref().and_then(|it| it.map_ident_up(db, token))
+        // {
+        //     result = current.krate;
 
-            let site = match site {
-                None => break,
-                Some(it) => it,
-            };
+        //     let site = match origin {
+        //         Origin::Def => &current.def_site,
+        //         Origin::Call => &current.call_site,
+        //     };
 
-            current = site.clone();
-            token = mapped.value;
-        }
+        //     let site = match site {
+        //         None => break,
+        //         Some(it) => it,
+        //     };
+
+        //     current = site.clone();
+        //     token = mapped.value;
+        // }
 
-        result
+        // result
     }
 }
 
@@ -127,45 +129,18 @@ struct HygieneInfo {
     attr_input_or_mac_def_start: Option<InFile<TextSize>>,
 
     macro_def: TokenExpander,
-    macro_arg: Arc<(crate::tt::Subtree, mbe::TokenMap, fixup::SyntaxFixupUndoInfo)>,
-    macro_arg_shift: mbe::Shift,
-    exp_map: Arc<mbe::TokenMap>,
+    macro_arg: Arc<crate::tt::Subtree>,
+    exp_map: Arc<SpanMap>,
 }
 
 impl HygieneInfo {
-    fn map_ident_up(
+    fn _map_ident_up(
         &self,
-        db: &dyn ExpandDatabase,
-        token: TextRange,
-    ) -> Option<(InFile<TextRange>, Origin)> {
-        let token_id = self.exp_map.token_by_range(token)?;
-        let (mut token_id, origin) = self.macro_def.map_id_up(token_id);
-
-        let loc = db.lookup_intern_macro_call(self.file.macro_call_id);
-
-        let (token_map, tt) = match &loc.kind {
-            MacroCallKind::Attr { attr_args, .. } => match self.macro_arg_shift.unshift(token_id) {
-                Some(unshifted) => {
-                    token_id = unshifted;
-                    (&attr_args.1, self.attr_input_or_mac_def_start?)
-                }
-                None => (&self.macro_arg.1, loc.kind.arg(db)?.map(|it| it.text_range().start())),
-            },
-            _ => match origin {
-                mbe::Origin::Call => {
-                    (&self.macro_arg.1, loc.kind.arg(db)?.map(|it| it.text_range().start()))
-                }
-                mbe::Origin::Def => match (&self.macro_def, &self.attr_input_or_mac_def_start) {
-                    (TokenExpander::DeclarativeMacro(expander), Some(tt)) => {
-                        (&expander.def_site_token_map, *tt)
-                    }
-                    _ => panic!("`Origin::Def` used with non-`macro_rules!` macro"),
-                },
-            },
-        };
-
-        let range = token_map.first_range_by_token(token_id, SyntaxKind::IDENT)?;
-        Some((tt.with_value(range + tt.value), origin))
+        _db: &dyn ExpandDatabase,
+        _token: TextRange,
+    ) -> Option<InFile<TextRange>> {
+        // self.exp_map.token_by_range(token).map(|span| InFile::new(span.anchor, span.range))
+        None
     }
 }
 
@@ -197,18 +172,13 @@ fn make_hygiene_info(
     let macro_def = db.macro_expander(loc.def);
     let (_, exp_map) = db.parse_macro_expansion(macro_file).value;
     let macro_arg = db.macro_arg(macro_file.macro_call_id).value.unwrap_or_else(|| {
-        Arc::new((
-            tt::Subtree { delimiter: tt::Delimiter::UNSPECIFIED, token_trees: Vec::new() },
-            Default::default(),
-            Default::default(),
-        ))
+        Arc::new(tt::Subtree { delimiter: tt::Delimiter::UNSPECIFIED, token_trees: Vec::new() })
     });
 
     HygieneInfo {
         file: macro_file,
         attr_input_or_mac_def_start: attr_input_or_mac_def
            .map(|it| it.map(|tt| tt.syntax().text_range().start())),
-        macro_arg_shift: mbe::Shift::new(&macro_arg.0),
         macro_arg,
         macro_def,
         exp_map,
diff --git a/crates/hir-expand/src/lib.rs b/crates/hir-expand/src/lib.rs
index 4be55126b8621..bd5796e000ac7 100644
--- a/crates/hir-expand/src/lib.rs
+++ b/crates/hir-expand/src/lib.rs
@@ -18,21 +18,13 @@ pub mod quote;
 pub mod eager;
 pub mod mod_path;
 pub mod attrs;
-mod fixup;
+// mod fixup;
 
-use mbe::TokenMap;
-pub use mbe::{Origin, ValueResult};
-
-use ::tt::token_id as tt;
 use triomphe::Arc;
 
 use std::{fmt, hash::Hash, iter};
 
-use base_db::{
-    impl_intern_key,
-    salsa::{self, InternId},
-    CrateId, FileId, FileRange, ProcMacroKind,
-};
+use base_db::{span::HirFileIdRepr, CrateId, FileId, FileRange, ProcMacroKind};
 use either::Either;
 use syntax::{
     algo::{self, skip_trivia_token},
@@ -51,6 +43,25 @@ use crate::{
     proc_macro::ProcMacroExpander,
 };
 
+pub use base_db::span::{HirFileId, MacroCallId, MacroFile};
+pub use mbe::ValueResult;
+
+pub type SpanMap = ::mbe::TokenMap<tt::SpanData>;
+pub type DeclarativeMacro = ::mbe::DeclarativeMacro<tt::SpanData>;
+
+pub mod tt {
+    pub use base_db::span::SpanData;
+    pub use tt::{DelimiterKind, Spacing, Span};
+
+    pub type Delimiter = ::tt::Delimiter<SpanData>;
+    pub type Subtree = ::tt::Subtree<SpanData>;
+    pub type Leaf = ::tt::Leaf<SpanData>;
+    pub type Literal = ::tt::Literal<SpanData>;
+    pub type Punct = ::tt::Punct<SpanData>;
+    pub type Ident = ::tt::Ident<SpanData>;
+    pub type TokenTree = ::tt::TokenTree<SpanData>;
+}
+
 pub type ExpandResult<T> = ValueResult<T, ExpandError>;
 
 #[derive(Debug, PartialEq, Eq, Clone, Hash)]
@@ -86,42 +97,43 @@ impl fmt::Display for ExpandError {
     }
 }
 
-/// Input to the analyzer is a set of files, where each file is identified by
-/// `FileId` and contains source code. However, another source of source code in
-/// Rust are macros: each macro can be thought of as producing a "temporary
-/// file". To assign an id to such a file, we use the id of the macro call that
-/// produced the file. So, a `HirFileId` is either a `FileId` (source code
-/// written by user), or a `MacroCallId` (source code produced by macro).
-///
-/// What is a `MacroCallId`? Simplifying, it's a `HirFileId` of a file
-/// containing the call plus the offset of the macro call in the file.
Note that -/// this is a recursive definition! However, the size_of of `HirFileId` is -/// finite (because everything bottoms out at the real `FileId`) and small -/// (`MacroCallId` uses the location interning. You can check details here: -/// ). -/// -/// The two variants are encoded in a single u32 which are differentiated by the MSB. -/// If the MSB is 0, the value represents a `FileId`, otherwise the remaining 31 bits represent a -/// `MacroCallId`. -#[derive(Clone, Copy, PartialEq, Eq, Hash)] -pub struct HirFileId(u32); - -impl fmt::Debug for HirFileId { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - self.repr().fmt(f) - } -} - -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] -pub struct MacroFile { - pub macro_call_id: MacroCallId, -} - /// `MacroCallId` identifies a particular macro invocation, like /// `println!("Hello, {}", world)`. #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] -pub struct MacroCallId(salsa::InternId); -impl_intern_key!(MacroCallId); +pub struct SyntaxContextId(base_db::salsa::InternId); +base_db::impl_intern_key!(SyntaxContextId); + +#[derive(Debug, Clone, Hash, PartialEq, Eq)] +pub struct SyntaxContext { + outer_expn: HirFileId, + outer_transparency: Transparency, + parent: SyntaxContextId, + /// This context, but with all transparent and semi-transparent expansions filtered away. + opaque: SyntaxContextId, + /// This context, but with all transparent expansions filtered away. + opaque_and_semitransparent: SyntaxContextId, + /// Name of the crate to which `$crate` with this context would resolve. + dollar_crate_name: name::Name, +} + +/// A property of a macro expansion that determines how identifiers +/// produced by that expansion are resolved. +#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Hash, Debug)] +pub enum Transparency { + /// Identifier produced by a transparent expansion is always resolved at call-site. + /// Call-site spans in procedural macros, hygiene opt-out in `macro` should use this. + Transparent, + /// Identifier produced by a semi-transparent expansion may be resolved + /// either at call-site or at definition-site. + /// If it's a local variable, label or `$crate` then it's resolved at def-site. + /// Otherwise it's resolved at call-site. + /// `macro_rules` macros behave like this, built-in macros currently behave like this too, + /// but that's an implementation detail. + SemiTransparent, + /// Identifier produced by an opaque expansion is always resolved at definition-site. + /// Def-site spans in procedural macros, identifiers from `macro` by default use this. + Opaque, +} #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct MacroCallLoc { @@ -154,7 +166,7 @@ pub enum MacroDefKind { #[derive(Debug, Clone, PartialEq, Eq, Hash)] struct EagerCallInfo { /// The expanded argument of the eager macro. - arg: Arc<(tt::Subtree, TokenMap)>, + arg: Arc<(tt::Subtree,)>, /// Call id of the eager macro's input file (this is the macro file for its fully expanded input). arg_id: MacroCallId, error: Option, @@ -178,7 +190,7 @@ pub enum MacroCallKind { }, Attr { ast_id: AstId, - attr_args: Arc<(tt::Subtree, mbe::TokenMap)>, + attr_args: Arc, /// Syntactical index of the invoking `#[attribute]`. /// /// Outer attributes are counted first, then inner attributes. 
This does not support
@@ -187,34 +199,40 @@ pub enum MacroCallKind {
     },
 }
 
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
-enum HirFileIdRepr {
-    FileId(FileId),
-    MacroFile(MacroFile),
-}
+pub trait HirFileIdExt {
+    /// For macro-expansion files, returns the original source file the
+    /// expansion originated from.
+    fn original_file(self, db: &dyn db::ExpandDatabase) -> FileId;
+    fn expansion_level(self, db: &dyn db::ExpandDatabase) -> u32;
 
-impl From<FileId> for HirFileId {
-    fn from(FileId(id): FileId) -> Self {
-        assert!(id < Self::MAX_FILE_ID);
-        HirFileId(id)
-    }
-}
+    /// If this is a macro call, returns the syntax node of the call.
+    fn call_node(self, db: &dyn db::ExpandDatabase) -> Option<InFile<SyntaxNode>>;
 
-impl From<MacroFile> for HirFileId {
-    fn from(MacroFile { macro_call_id: MacroCallId(id) }: MacroFile) -> Self {
-        let id = id.as_u32();
-        assert!(id < Self::MAX_FILE_ID);
-        HirFileId(id | Self::MACRO_FILE_TAG_MASK)
-    }
-}
+    /// If this is a macro call, returns the syntax node of the very first macro call this file resides in.
+    fn original_call_node(self, db: &dyn db::ExpandDatabase) -> Option<(FileId, SyntaxNode)>;
 
-impl HirFileId {
-    const MAX_FILE_ID: u32 = u32::MAX ^ Self::MACRO_FILE_TAG_MASK;
-    const MACRO_FILE_TAG_MASK: u32 = 1 << 31;
+    /// Return expansion information if it is a macro-expansion file
+    fn expansion_info(self, db: &dyn db::ExpandDatabase) -> Option<ExpansionInfo>;
 
-    /// For macro-expansion files, returns the file original source file the
-    /// expansion originated from.
-    pub fn original_file(self, db: &dyn db::ExpandDatabase) -> FileId {
+    fn as_builtin_derive_attr_node(&self, db: &dyn db::ExpandDatabase)
+        -> Option<InFile<ast::Attr>>;
+    fn is_builtin_derive(&self, db: &dyn db::ExpandDatabase) -> bool;
+    fn is_custom_derive(&self, db: &dyn db::ExpandDatabase) -> bool;
+
+    /// Return whether this file is an include macro
+    fn is_include_macro(&self, db: &dyn db::ExpandDatabase) -> bool;
+
+    fn is_eager(&self, db: &dyn db::ExpandDatabase) -> bool;
+    /// Return whether this file is an attr macro
+    fn is_attr_macro(&self, db: &dyn db::ExpandDatabase) -> bool;
+
+    /// Return whether this file is the pseudo expansion of the derive attribute.
+    /// See [`crate::builtin_attr_macro::derive_attr_expand`].
+    fn is_derive_attr_pseudo_expansion(&self, db: &dyn db::ExpandDatabase) -> bool;
+}
+
+impl HirFileIdExt for HirFileId {
+    fn original_file(self, db: &dyn db::ExpandDatabase) -> FileId {
         let mut file_id = self;
         loop {
             match file_id.repr() {
@@ -231,7 +249,7 @@
         }
     }
 
-    pub fn expansion_level(self, db: &dyn db::ExpandDatabase) -> u32 {
+    fn expansion_level(self, db: &dyn db::ExpandDatabase) -> u32 {
         let mut level = 0;
         let mut curr = self;
         while let Some(macro_file) = curr.macro_file() {
@@ -243,15 +261,13 @@
         level
     }
 
-    /// If this is a macro call, returns the syntax node of the call.
-    pub fn call_node(self, db: &dyn db::ExpandDatabase) -> Option<InFile<SyntaxNode>> {
+    fn call_node(self, db: &dyn db::ExpandDatabase) -> Option<InFile<SyntaxNode>> {
        let macro_file = self.macro_file()?;
        let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id);
        Some(loc.to_node(db))
     }
 
-    /// If this is a macro call, returns the syntax node of the very first macro call this file resides in.
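
The `HirFileIdExt` trait above follows the extension-trait pattern: `HirFileId` itself moved into `base_db`, so methods that need `hir_expand`'s database are declared on a trait and implemented for the foreign type; callers then import the trait, which is what the many `HirFileIdExt` import additions later in this patch are about. A minimal sketch of the pattern with simplified, hypothetical stand-ins (a slice stands in for the database):

```rust
// stand-in for a type owned by a lower-level crate (here, `base_db`)
#[derive(Clone, Copy)]
struct HirFileId(u32);

// the extension trait lives in the higher-level crate together with
// whatever extra context its methods need
trait HirFileIdExt {
    fn expansion_level(self, db: &[u32]) -> u32;
}

impl HirFileIdExt for HirFileId {
    fn expansion_level(self, db: &[u32]) -> u32 {
        // toy stand-in for walking the macro-call chain through the database
        db.get(self.0 as usize).copied().unwrap_or(0)
    }
}

fn main() {
    // callers only need to bring the trait into scope
    assert_eq!(HirFileId(1).expansion_level(&[0, 3]), 3);
}
```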
- pub fn original_call_node(self, db: &dyn db::ExpandDatabase) -> Option<(FileId, SyntaxNode)> { + fn original_call_node(self, db: &dyn db::ExpandDatabase) -> Option<(FileId, SyntaxNode)> { let mut call = db.lookup_intern_macro_call(self.macro_file()?.macro_call_id).to_node(db); loop { match call.file_id.repr() { @@ -264,12 +280,12 @@ impl HirFileId { } /// Return expansion information if it is a macro-expansion file - pub fn expansion_info(self, db: &dyn db::ExpandDatabase) -> Option { + fn expansion_info(self, db: &dyn db::ExpandDatabase) -> Option { let macro_file = self.macro_file()?; ExpansionInfo::new(db, macro_file) } - pub fn as_builtin_derive_attr_node( + fn as_builtin_derive_attr_node( &self, db: &dyn db::ExpandDatabase, ) -> Option> { @@ -282,7 +298,7 @@ impl HirFileId { Some(attr.with_value(ast::Attr::cast(attr.value.clone())?)) } - pub fn is_custom_derive(&self, db: &dyn db::ExpandDatabase) -> bool { + fn is_custom_derive(&self, db: &dyn db::ExpandDatabase) -> bool { match self.macro_file() { Some(macro_file) => { matches!( @@ -294,7 +310,7 @@ impl HirFileId { } } - pub fn is_builtin_derive(&self, db: &dyn db::ExpandDatabase) -> bool { + fn is_builtin_derive(&self, db: &dyn db::ExpandDatabase) -> bool { match self.macro_file() { Some(macro_file) => { matches!( @@ -306,8 +322,7 @@ impl HirFileId { } } - /// Return whether this file is an include macro - pub fn is_include_macro(&self, db: &dyn db::ExpandDatabase) -> bool { + fn is_include_macro(&self, db: &dyn db::ExpandDatabase) -> bool { match self.macro_file() { Some(macro_file) => { db.lookup_intern_macro_call(macro_file.macro_call_id).def.is_include() @@ -316,7 +331,7 @@ impl HirFileId { } } - pub fn is_eager(&self, db: &dyn db::ExpandDatabase) -> bool { + fn is_eager(&self, db: &dyn db::ExpandDatabase) -> bool { match self.macro_file() { Some(macro_file) => { let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id); @@ -326,8 +341,7 @@ impl HirFileId { } } - /// Return whether this file is an attr macro - pub fn is_attr_macro(&self, db: &dyn db::ExpandDatabase) -> bool { + fn is_attr_macro(&self, db: &dyn db::ExpandDatabase) -> bool { match self.macro_file() { Some(macro_file) => { let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id); @@ -337,9 +351,7 @@ impl HirFileId { } } - /// Return whether this file is the pseudo expansion of the derive attribute. - /// See [`crate::builtin_attr_macro::derive_attr_expand`]. 
- pub fn is_derive_attr_pseudo_expansion(&self, db: &dyn db::ExpandDatabase) -> bool { + fn is_derive_attr_pseudo_expansion(&self, db: &dyn db::ExpandDatabase) -> bool { match self.macro_file() { Some(macro_file) => { let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id); @@ -348,38 +360,6 @@ impl HirFileId { None => false, } } - - #[inline] - pub fn is_macro(self) -> bool { - self.0 & Self::MACRO_FILE_TAG_MASK != 0 - } - - #[inline] - pub fn macro_file(self) -> Option { - match self.0 & Self::MACRO_FILE_TAG_MASK { - 0 => None, - _ => Some(MacroFile { - macro_call_id: MacroCallId(InternId::from(self.0 ^ Self::MACRO_FILE_TAG_MASK)), - }), - } - } - - #[inline] - pub fn file_id(self) -> Option { - match self.0 & Self::MACRO_FILE_TAG_MASK { - 0 => Some(FileId(self.0)), - _ => None, - } - } - - fn repr(self) -> HirFileIdRepr { - match self.0 & Self::MACRO_FILE_TAG_MASK { - 0 => HirFileIdRepr::FileId(FileId(self.0)), - _ => HirFileIdRepr::MacroFile(MacroFile { - macro_call_id: MacroCallId(InternId::from(self.0 ^ Self::MACRO_FILE_TAG_MASK)), - }), - } - } } impl MacroDefId { @@ -587,16 +567,6 @@ impl MacroCallKind { } } -impl MacroCallId { - pub fn as_file(self) -> HirFileId { - MacroFile { macro_call_id: self }.into() - } - - pub fn as_macro_file(self) -> MacroFile { - MacroFile { macro_call_id: self } - } -} - /// ExpansionInfo mainly describes how to map text range between src and expanded macro #[derive(Debug, Clone, PartialEq, Eq)] pub struct ExpansionInfo { @@ -607,11 +577,8 @@ pub struct ExpansionInfo { attr_input_or_mac_def: Option>, macro_def: TokenExpander, - macro_arg: Arc<(tt::Subtree, mbe::TokenMap, fixup::SyntaxFixupUndoInfo)>, - /// A shift built from `macro_arg`'s subtree, relevant for attributes as the item is the macro arg - /// and as such we need to shift tokens if they are part of an attributes input instead of their item. - macro_arg_shift: mbe::Shift, - exp_map: Arc, + macro_arg: Arc, + exp_map: Arc, } impl ExpansionInfo { @@ -640,69 +607,33 @@ impl ExpansionInfo { pub fn map_token_down( &self, db: &dyn db::ExpandDatabase, - item: Option, token: InFile<&SyntaxToken>, // FIXME: use this for range mapping, so that we can resolve inline format args _relative_token_offset: Option, ) -> Option> + '_> { assert_eq!(token.file_id, self.arg.file_id); - let token_id_in_attr_input = if let Some(item) = item { - // check if we are mapping down in an attribute input - // this is a special case as attributes can have two inputs - let call_id = self.expanded.file_id.macro_call_id; - let loc = db.lookup_intern_macro_call(call_id); - - let token_range = token.value.text_range(); - match &loc.kind { - MacroCallKind::Attr { attr_args, invoc_attr_index, .. 
} => { - // FIXME: handle `cfg_attr` - let attr = item - .doc_comments_and_attrs() - .nth(invoc_attr_index.ast_index()) - .and_then(Either::left)?; - match attr.token_tree() { - Some(token_tree) - if token_tree.syntax().text_range().contains_range(token_range) => - { - let attr_input_start = - token_tree.left_delimiter_token()?.text_range().start(); - let relative_range = - token.value.text_range().checked_sub(attr_input_start)?; - // shift by the item's tree's max id - let token_id = attr_args.1.token_by_range(relative_range)?; - - let token_id = if loc.def.is_attribute_derive() { - // we do not shift for `#[derive]`, as we only need to downmap the derive attribute tokens - token_id - } else { - self.macro_arg_shift.shift(token_id) - }; - Some(token_id) - } - _ => None, - } - } - _ => None, - } + let span_map = &self.exp_map.span_map; + let (start, end) = if span_map + .first() + .map_or(false, |(_, span)| span.anchor.file_id == token.file_id) + { + (0, span_map.partition_point(|a| a.1.anchor.file_id == token.file_id)) } else { - None - }; - - let token_id = match token_id_in_attr_input { - Some(token_id) => token_id, - // the token is not inside `an attribute's input so do the lookup in the macro_arg as usual - None => { - let relative_range = - token.value.text_range().checked_sub(self.arg.value.text_range().start())?; - let token_id = self.macro_arg.1.token_by_range(relative_range)?; - // conditionally shift the id by a declarative macro definition - self.macro_def.map_id_down(token_id) - } + let start = span_map.partition_point(|a| a.1.anchor.file_id != token.file_id); + ( + start, + start + span_map[start..].partition_point(|a| a.1.anchor.file_id == token.file_id), + ) }; - - let tokens = self - .exp_map - .ranges_by_token(token_id, token.value.kind()) + let token_text_range = token.value.text_range(); + let ast_id_map = db.ast_id_map(token.file_id); + let tokens = span_map[start..end] + .iter() + .filter_map(move |(range, span)| { + let offset = ast_id_map.get_raw(span.anchor.ast_id).text_range().start(); + let abs_range = span.range + offset; + token_text_range.eq(&abs_range).then_some(*range) + }) .flat_map(move |range| self.expanded.value.covering_element(range).into_token()); Some(tokens.map(move |token| InFile::new(self.expanded.file_id.into(), token))) @@ -713,60 +644,18 @@ impl ExpansionInfo { &self, db: &dyn db::ExpandDatabase, token: InFile<&SyntaxToken>, - ) -> Option<(InFile, Origin)> { - assert_eq!(token.file_id, self.expanded.file_id.into()); - // Fetch the id through its text range, - let token_id = self.exp_map.token_by_range(token.value.text_range())?; - // conditionally unshifting the id to accommodate for macro-rules def site - let (mut token_id, origin) = self.macro_def.map_id_up(token_id); - - let call_id = self.expanded.file_id.macro_call_id; - let loc = db.lookup_intern_macro_call(call_id); - - // Special case: map tokens from `include!` expansions to the included file - if loc.def.is_include() { - if let Ok((tt_and_map, file_id)) = db.include_expand(call_id) { - let range = tt_and_map.1.first_range_by_token(token_id, token.value.kind())?; - let source = db.parse(file_id); - - let token = source.syntax_node().covering_element(range).into_token()?; - - return Some((InFile::new(file_id.into(), token), Origin::Call)); - } - } - - // Attributes are a bit special for us, they have two inputs, the input tokentree and the annotated item. - let (token_map, tt) = match &loc.kind { - MacroCallKind::Attr { attr_args, .. 
} => { - if loc.def.is_attribute_derive() { - (&attr_args.1, self.attr_input_or_mac_def.clone()?.syntax().cloned()) - } else { - // try unshifting the token id, if unshifting fails, the token resides in the non-item attribute input - // note that the `TokenExpander::map_id_up` earlier only unshifts for declarative macros, so we don't double unshift with this - match self.macro_arg_shift.unshift(token_id) { - Some(unshifted) => { - token_id = unshifted; - (&attr_args.1, self.attr_input_or_mac_def.clone()?.syntax().cloned()) - } - None => (&self.macro_arg.1, self.arg.clone()), - } - } - } - _ => match origin { - mbe::Origin::Call => (&self.macro_arg.1, self.arg.clone()), - mbe::Origin::Def => match (&self.macro_def, &self.attr_input_or_mac_def) { - (TokenExpander::DeclarativeMacro(expander), Some(tt)) => { - (&expander.def_site_token_map, tt.syntax().cloned()) - } - _ => panic!("`Origin::Def` used with non-`macro_rules!` macro"), - }, - }, - }; - - let range = token_map.first_range_by_token(token_id, token.value.kind())?; - let token = - tt.value.covering_element(range + tt.value.text_range().start()).into_token()?; - Some((tt.with_value(token), origin)) + ) -> Option> { + self.exp_map.span_for_range(token.value.text_range()).and_then(|span| { + let anchor = + db.ast_id_map(span.anchor.file_id).get_raw(span.anchor.ast_id).text_range().start(); + InFile::new( + span.anchor.file_id, + db.parse_or_expand(span.anchor.file_id) + .covering_element(span.range + anchor) + .into_token(), + ) + .transpose() + }) } fn new(db: &dyn db::ExpandDatabase, macro_file: MacroFile) -> Option { @@ -779,11 +668,7 @@ impl ExpansionInfo { let expanded = InMacroFile { file_id: macro_file, value: parse.syntax_node() }; let macro_arg = db.macro_arg(macro_file.macro_call_id).value.unwrap_or_else(|| { - Arc::new(( - tt::Subtree { delimiter: tt::Delimiter::UNSPECIFIED, token_trees: Vec::new() }, - Default::default(), - Default::default(), - )) + Arc::new(tt::Subtree { delimiter: tt::Delimiter::UNSPECIFIED, token_trees: Vec::new() }) }); let def = loc.def.ast_id().left().and_then(|id| { @@ -814,7 +699,6 @@ impl ExpansionInfo { expanded, arg: arg_tt, attr_input_or_mac_def, - macro_arg_shift: mbe::Shift::new(¯o_arg.0), macro_arg, macro_def, exp_map, @@ -1018,7 +902,7 @@ impl InFile<&SyntaxNode> { impl InFile { pub fn upmap(self, db: &dyn db::ExpandDatabase) -> Option> { let expansion = self.file_id.expansion_info(db)?; - expansion.map_token_up(db, self.as_ref()).map(|(it, _)| it) + expansion.map_token_up(db, self.as_ref()) } /// Falls back to the macro call range if the node cannot be mapped up fully. @@ -1067,6 +951,7 @@ impl From> for InFile { } } +// FIXME: Get rid of this fn ascend_node_border_tokens( db: &dyn db::ExpandDatabase, InFile { file_id, value: node }: InFile<&SyntaxNode>, @@ -1090,13 +975,13 @@ fn ascend_call_token( token: InFile, ) -> Option> { let mut mapping = expansion.map_token_up(db, token.as_ref())?; - while let (mapped, Origin::Call) = mapping { - match mapped.file_id.expansion_info(db) { - Some(info) => mapping = info.map_token_up(db, mapped.as_ref())?, - None => return Some(mapped), + + loop { + match mapping.file_id.expansion_info(db) { + Some(info) => mapping = info.map_token_up(db, mapping.as_ref())?, + None => return Some(mapping), } } - None } impl InFile { diff --git a/crates/hir-expand/src/quote.rs b/crates/hir-expand/src/quote.rs index ab3809abc7a26..9dd4965c15066 100644 --- a/crates/hir-expand/src/quote.rs +++ b/crates/hir-expand/src/quote.rs @@ -18,8 +18,8 @@ macro_rules! 
__quote { crate::tt::Subtree { delimiter: crate::tt::Delimiter { kind: crate::tt::DelimiterKind::$delim, - open: crate::tt::TokenId::unspecified(), - close: crate::tt::TokenId::unspecified(), + open: ::DUMMY, + close: ::DUMMY, }, token_trees: $crate::quote::IntoTt::to_tokens(children), } @@ -32,7 +32,7 @@ macro_rules! __quote { crate::tt::Leaf::Punct(crate::tt::Punct { char: $first, spacing: crate::tt::Spacing::Alone, - span: crate::tt::TokenId::unspecified(), + span: ::DUMMY, }).into() ] } @@ -44,12 +44,12 @@ macro_rules! __quote { crate::tt::Leaf::Punct(crate::tt::Punct { char: $first, spacing: crate::tt::Spacing::Joint, - span: crate::tt::TokenId::unspecified(), + span: ::DUMMY, }).into(), crate::tt::Leaf::Punct(crate::tt::Punct { char: $sec, spacing: crate::tt::Spacing::Alone, - span: crate::tt::TokenId::unspecified(), + span: ::DUMMY, }).into() ] } @@ -89,7 +89,7 @@ macro_rules! __quote { vec![ { crate::tt::Leaf::Ident(crate::tt::Ident { text: stringify!($tt).into(), - span: crate::tt::TokenId::unspecified(), + span: ::DUMMY, }).into() }] }; @@ -195,20 +195,22 @@ macro_rules! impl_to_to_tokentrees { } impl_to_to_tokentrees! { - u32 => self { crate::tt::Literal{text: self.to_string().into(), span: crate::tt::TokenId::unspecified()} }; - usize => self { crate::tt::Literal{text: self.to_string().into(), span: crate::tt::TokenId::unspecified()} }; - i32 => self { crate::tt::Literal{text: self.to_string().into(), span: crate::tt::TokenId::unspecified()} }; - bool => self { crate::tt::Ident{text: self.to_string().into(), span: crate::tt::TokenId::unspecified()} }; + u32 => self { crate::tt::Literal{text: self.to_string().into(), span: ::DUMMY} }; + usize => self { crate::tt::Literal{text: self.to_string().into(), span: ::DUMMY} }; + i32 => self { crate::tt::Literal{text: self.to_string().into(), span: ::DUMMY} }; + bool => self { crate::tt::Ident{text: self.to_string().into(), span: ::DUMMY} }; crate::tt::Leaf => self { self }; crate::tt::Literal => self { self }; crate::tt::Ident => self { self }; crate::tt::Punct => self { self }; - &str => self { crate::tt::Literal{text: format!("\"{}\"", self.escape_default()).into(), span: crate::tt::TokenId::unspecified()}}; - String => self { crate::tt::Literal{text: format!("\"{}\"", self.escape_default()).into(), span: crate::tt::TokenId::unspecified()}} + &str => self { crate::tt::Literal{text: format!("\"{}\"", self.escape_default()).into(), span: ::DUMMY}}; + String => self { crate::tt::Literal{text: format!("\"{}\"", self.escape_default()).into(), span: ::DUMMY}} } #[cfg(test)] mod tests { + use expect_test::expect; + #[test] fn test_quote_delimiters() { assert_eq!(quote!({}).to_string(), "{}"); @@ -231,7 +233,10 @@ mod tests { } fn mk_ident(name: &str) -> crate::tt::Ident { - crate::tt::Ident { text: name.into(), span: crate::tt::TokenId::unspecified() } + crate::tt::Ident { + text: name.into(), + span: ::DUMMY, + } } #[test] @@ -241,7 +246,9 @@ mod tests { let quoted = quote!(#a); assert_eq!(quoted.to_string(), "hello"); let t = format!("{quoted:?}"); - assert_eq!(t, "SUBTREE $$ 4294967295 4294967295\n IDENT hello 4294967295"); + expect![[r#" + SUBTREE $$ SpanData { range: 0..0, anchor: SpanAnchor { file_id: FileId(0), ast_id: Idx::>(0) } } SpanData { range: 0..0, anchor: SpanAnchor { file_id: FileId(0), ast_id: Idx::>(0) } } + IDENT hello SpanData { range: 0..0, anchor: SpanAnchor { file_id: FileId(0), ast_id: Idx::>(0) } }"#]].assert_eq(&t); } #[test] @@ -273,8 +280,8 @@ mod tests { let list = crate::tt::Subtree { delimiter: 
crate::tt::Delimiter { kind: crate::tt::DelimiterKind::Brace, - open: crate::tt::TokenId::unspecified(), - close: crate::tt::TokenId::unspecified(), + open: ::DUMMY, + close: ::DUMMY, }, token_trees: fields.collect(), }; diff --git a/crates/hir-ty/src/diagnostics/decl_check.rs b/crates/hir-ty/src/diagnostics/decl_check.rs index f4c079b48c58c..c2ff487ef91d6 100644 --- a/crates/hir-ty/src/diagnostics/decl_check.rs +++ b/crates/hir-ty/src/diagnostics/decl_check.rs @@ -24,7 +24,7 @@ use hir_def::{ }; use hir_expand::{ name::{AsName, Name}, - HirFileId, + HirFileId, HirFileIdExt, }; use stdx::{always, never}; use syntax::{ diff --git a/crates/hir-ty/src/mir/eval.rs b/crates/hir-ty/src/mir/eval.rs index 62efb858511b4..fbfb6ff8cddde 100644 --- a/crates/hir-ty/src/mir/eval.rs +++ b/crates/hir-ty/src/mir/eval.rs @@ -21,7 +21,7 @@ use hir_def::{ AdtId, ConstId, DefWithBodyId, EnumVariantId, FunctionId, HasModule, ItemContainerId, Lookup, StaticId, VariantId, }; -use hir_expand::{mod_path::ModPath, InFile}; +use hir_expand::{mod_path::ModPath, HirFileIdExt, InFile}; use intern::Interned; use la_arena::ArenaMap; use rustc_hash::{FxHashMap, FxHashSet}; diff --git a/crates/hir/src/db.rs b/crates/hir/src/db.rs index 936581bfe32c3..f4129e736ee13 100644 --- a/crates/hir/src/db.rs +++ b/crates/hir/src/db.rs @@ -6,7 +6,7 @@ pub use hir_def::db::*; pub use hir_expand::db::{ AstIdMapQuery, DeclMacroExpanderQuery, ExpandDatabase, ExpandDatabaseStorage, - ExpandProcMacroQuery, HygieneFrameQuery, InternMacroCallQuery, MacroArgNodeQuery, - MacroExpandQuery, ParseMacroExpansionErrorQuery, ParseMacroExpansionQuery, + ExpandProcMacroQuery, HygieneFrameQuery, InternMacroCallQuery, MacroArgQuery, MacroExpandQuery, + ParseMacroExpansionErrorQuery, ParseMacroExpansionQuery, }; pub use hir_ty::db::*; diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs index 1bfbf7212bf0a..50d88b4cf8389 100644 --- a/crates/hir/src/lib.rs +++ b/crates/hir/src/lib.rs @@ -124,7 +124,7 @@ pub use { hir_expand::{ attrs::{Attr, AttrId}, name::{known, Name}, - ExpandResult, HirFileId, InFile, MacroFile, Origin, + tt, ExpandResult, HirFileId, HirFileIdExt, InFile, MacroFile, }, hir_ty::{ display::{ClosureStyle, HirDisplay, HirDisplayError, HirWrite}, diff --git a/crates/hir/src/semantics.rs b/crates/hir/src/semantics.rs index a42e0978b25f3..758e6118aa123 100644 --- a/crates/hir/src/semantics.rs +++ b/crates/hir/src/semantics.rs @@ -15,7 +15,7 @@ use hir_def::{ type_ref::Mutability, AsMacroCall, DefWithBodyId, FieldId, FunctionId, MacroId, TraitId, VariantId, }; -use hir_expand::{db::ExpandDatabase, name::AsName, ExpansionInfo, MacroCallId}; +use hir_expand::{db::ExpandDatabase, name::AsName, ExpansionInfo, HirFileIdExt, MacroCallId}; use itertools::Itertools; use rustc_hash::{FxHashMap, FxHashSet}; use smallvec::{smallvec, SmallVec}; @@ -549,7 +549,7 @@ impl<'db> SemanticsImpl<'db> { let mut mcache = self.macro_call_cache.borrow_mut(); let mut process_expansion_for_token = - |stack: &mut SmallVec<_>, macro_file, item, token: InFile<&_>| { + |stack: &mut SmallVec<_>, macro_file, token: InFile<&_>| { let expansion_info = cache .entry(macro_file) .or_insert_with(|| macro_file.expansion_info(self.db.upcast())) @@ -562,7 +562,6 @@ impl<'db> SemanticsImpl<'db> { let mapped_tokens = expansion_info.map_token_down( self.db.upcast(), - item, token, relative_token_offset, )?; @@ -587,17 +586,12 @@ impl<'db> SemanticsImpl<'db> { // Don't force populate the dyn cache for items that don't have an attribute anyways return None; } - 
Some((ctx.item_to_macro_call(token.with_value(item.clone()))?, item)) + Some(ctx.item_to_macro_call(token.with_value(item.clone()))?) }) }); - if let Some((call_id, item)) = containing_attribute_macro_call { + if let Some(call_id) = containing_attribute_macro_call { let file_id = call_id.as_file(); - return process_expansion_for_token( - &mut stack, - file_id, - Some(item), - token.as_ref(), - ); + return process_expansion_for_token(&mut stack, file_id, token.as_ref()); } // Then check for token trees, that means we are either in a function-like macro or @@ -622,7 +616,7 @@ impl<'db> SemanticsImpl<'db> { it } }; - process_expansion_for_token(&mut stack, file_id, None, token.as_ref()) + process_expansion_for_token(&mut stack, file_id, token.as_ref()) } else if let Some(meta) = ast::Meta::cast(parent) { // attribute we failed expansion for earlier, this might be a derive invocation // or derive helper attribute @@ -647,7 +641,6 @@ impl<'db> SemanticsImpl<'db> { return process_expansion_for_token( &mut stack, file_id, - Some(adt.into()), token.as_ref(), ); } @@ -679,13 +672,11 @@ impl<'db> SemanticsImpl<'db> { let id = self.db.ast_id_map(token.file_id).ast_id(&adt); let helpers = def_map.derive_helpers_in_scope(InFile::new(token.file_id, id))?; - let item = Some(adt.into()); let mut res = None; for (.., derive) in helpers.iter().filter(|(helper, ..)| *helper == attr_name) { res = res.or(process_expansion_for_token( &mut stack, derive.as_file(), - item.clone(), token.as_ref(), )); } diff --git a/crates/hir/src/semantics/source_to_def.rs b/crates/hir/src/semantics/source_to_def.rs index aabda3655602a..5b20c873157d2 100644 --- a/crates/hir/src/semantics/source_to_def.rs +++ b/crates/hir/src/semantics/source_to_def.rs @@ -97,7 +97,7 @@ use hir_def::{ FunctionId, GenericDefId, GenericParamId, ImplId, LifetimeParamId, MacroId, ModuleId, StaticId, StructId, TraitAliasId, TraitId, TypeAliasId, TypeParamId, UnionId, UseId, VariantId, }; -use hir_expand::{attrs::AttrId, name::AsName, HirFileId, MacroCallId}; +use hir_expand::{attrs::AttrId, name::AsName, HirFileId, HirFileIdExt, MacroCallId}; use rustc_hash::FxHashMap; use smallvec::SmallVec; use stdx::{impl_from, never}; diff --git a/crates/hir/src/source_analyzer.rs b/crates/hir/src/source_analyzer.rs index 55c2f8324c6d0..8fc7f2c05d745 100644 --- a/crates/hir/src/source_analyzer.rs +++ b/crates/hir/src/source_analyzer.rs @@ -30,7 +30,7 @@ use hir_expand::{ mod_path::path, name, name::{AsName, Name}, - HirFileId, InFile, + HirFileId, HirFileIdExt, InFile, }; use hir_ty::{ diagnostics::{ diff --git a/crates/ide-assists/src/handlers/extract_module.rs b/crates/ide-assists/src/handlers/extract_module.rs index 6839c5820dc99..4b9fedc7e8557 100644 --- a/crates/ide-assists/src/handlers/extract_module.rs +++ b/crates/ide-assists/src/handlers/extract_module.rs @@ -3,7 +3,7 @@ use std::{ iter, }; -use hir::{HasSource, ModuleSource}; +use hir::{HasSource, HirFileIdExt, ModuleSource}; use ide_db::{ assists::{AssistId, AssistKind}, base_db::FileId, diff --git a/crates/ide-assists/src/handlers/fix_visibility.rs b/crates/ide-assists/src/handlers/fix_visibility.rs index c9f272474e7e1..204e796fa2c0d 100644 --- a/crates/ide-assists/src/handlers/fix_visibility.rs +++ b/crates/ide-assists/src/handlers/fix_visibility.rs @@ -1,4 +1,6 @@ -use hir::{db::HirDatabase, HasSource, HasVisibility, ModuleDef, PathResolution, ScopeDef}; +use hir::{ + db::HirDatabase, HasSource, HasVisibility, HirFileIdExt, ModuleDef, PathResolution, ScopeDef, +}; use ide_db::base_db::FileId; use 
syntax::{ ast::{self, edit_in_place::HasVisibilityEdit, make, HasVisibility as _}, diff --git a/crates/ide-assists/src/handlers/generate_constant.rs b/crates/ide-assists/src/handlers/generate_constant.rs index eccd7675fbaae..a4e8e7388f624 100644 --- a/crates/ide-assists/src/handlers/generate_constant.rs +++ b/crates/ide-assists/src/handlers/generate_constant.rs @@ -1,5 +1,5 @@ use crate::assist_context::{AssistContext, Assists}; -use hir::{HasVisibility, HirDisplay, Module}; +use hir::{HasVisibility, HirDisplay, HirFileIdExt, Module}; use ide_db::{ assists::{AssistId, AssistKind}, base_db::{FileId, Upcast}, diff --git a/crates/ide-assists/src/handlers/generate_enum_variant.rs b/crates/ide-assists/src/handlers/generate_enum_variant.rs index 184f523e01bd9..be7a5e6c8bccc 100644 --- a/crates/ide-assists/src/handlers/generate_enum_variant.rs +++ b/crates/ide-assists/src/handlers/generate_enum_variant.rs @@ -1,4 +1,4 @@ -use hir::{HasSource, HirDisplay, InFile}; +use hir::{HasSource, HirDisplay, HirFileIdExt, InFile}; use ide_db::assists::{AssistId, AssistKind}; use syntax::{ ast::{self, make, HasArgList}, diff --git a/crates/ide-assists/src/handlers/generate_function.rs b/crates/ide-assists/src/handlers/generate_function.rs index f74fc5df4bd26..a113c817f7e94 100644 --- a/crates/ide-assists/src/handlers/generate_function.rs +++ b/crates/ide-assists/src/handlers/generate_function.rs @@ -1,5 +1,6 @@ use hir::{ - Adt, AsAssocItem, HasSource, HirDisplay, Module, PathResolution, Semantics, Type, TypeInfo, + Adt, AsAssocItem, HasSource, HirDisplay, HirFileIdExt, Module, PathResolution, Semantics, Type, + TypeInfo, }; use ide_db::{ base_db::FileId, @@ -510,7 +511,7 @@ fn assoc_fn_target_info( } fn get_insert_offset(target: &GeneratedFunctionTarget) -> TextSize { - match &target { + match target { GeneratedFunctionTarget::BehindItem(it) => it.text_range().end(), GeneratedFunctionTarget::InEmptyItemList(it) => it.text_range().start() + TextSize::of('{'), } diff --git a/crates/ide-assists/src/handlers/remove_unused_imports.rs b/crates/ide-assists/src/handlers/remove_unused_imports.rs index 5fcab8c02b06d..10076e60c3e8b 100644 --- a/crates/ide-assists/src/handlers/remove_unused_imports.rs +++ b/crates/ide-assists/src/handlers/remove_unused_imports.rs @@ -1,6 +1,6 @@ use std::collections::{hash_map::Entry, HashMap}; -use hir::{InFile, Module, ModuleSource}; +use hir::{HirFileIdExt, InFile, Module, ModuleSource}; use ide_db::{ base_db::FileRange, defs::Definition, diff --git a/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs b/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs index 69a4e748b7c5d..1b373bcb8ce93 100644 --- a/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs +++ b/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs @@ -1,4 +1,4 @@ -use hir::{InFile, ModuleDef}; +use hir::{HirFileIdExt, InFile, ModuleDef}; use ide_db::{helpers::mod_path_to_ast, imports::import_assets::NameToImport, items_locator}; use itertools::Itertools; use syntax::{ diff --git a/crates/ide-completion/src/completions.rs b/crates/ide-completion/src/completions.rs index 7d38c638a8ed5..f49abcbae9bc4 100644 --- a/crates/ide-completion/src/completions.rs +++ b/crates/ide-completion/src/completions.rs @@ -683,7 +683,7 @@ pub(super) fn complete_name_ref( ctx: &CompletionContext<'_>, NameRefContext { nameref, kind }: &NameRefContext, ) { - match kind { + match dbg!(kind) { NameRefKind::Path(path_ctx) => { flyimport::import_on_the_fly_path(acc, ctx, path_ctx); diff 
--git a/crates/ide-completion/src/completions/mod_.rs b/crates/ide-completion/src/completions/mod_.rs index 1e09894059d5a..5d138eea46f4b 100644 --- a/crates/ide-completion/src/completions/mod_.rs +++ b/crates/ide-completion/src/completions/mod_.rs @@ -2,7 +2,7 @@ use std::iter; -use hir::{Module, ModuleSource}; +use hir::{HirFileIdExt, Module, ModuleSource}; use ide_db::{ base_db::{SourceDatabaseExt, VfsPath}, FxHashSet, RootDatabase, SymbolKind, diff --git a/crates/ide-db/src/apply_change.rs b/crates/ide-db/src/apply_change.rs index a0b05c87ae73f..65eaa6510f0ed 100644 --- a/crates/ide-db/src/apply_change.rs +++ b/crates/ide-db/src/apply_change.rs @@ -99,7 +99,7 @@ impl RootDatabase { hir::db::AstIdMapQuery hir::db::ParseMacroExpansionQuery hir::db::InternMacroCallQuery - hir::db::MacroArgNodeQuery + hir::db::MacroArgQuery hir::db::DeclMacroExpanderQuery hir::db::MacroExpandQuery hir::db::ExpandProcMacroQuery diff --git a/crates/ide-db/src/lib.rs b/crates/ide-db/src/lib.rs index 226def4d52684..258d893b47da0 100644 --- a/crates/ide-db/src/lib.rs +++ b/crates/ide-db/src/lib.rs @@ -204,7 +204,7 @@ impl RootDatabase { hir_db::AstIdMapQuery // hir_db::ParseMacroExpansionQuery // hir_db::InternMacroCallQuery - hir_db::MacroArgNodeQuery + hir_db::MacroArgQuery hir_db::DeclMacroExpanderQuery // hir_db::MacroExpandQuery hir_db::ExpandProcMacroQuery diff --git a/crates/ide-db/src/rename.rs b/crates/ide-db/src/rename.rs index 353a9749a37d9..cc9038fdfa2fe 100644 --- a/crates/ide-db/src/rename.rs +++ b/crates/ide-db/src/rename.rs @@ -24,7 +24,7 @@ use std::fmt; use base_db::{AnchoredPathBuf, FileId, FileRange}; use either::Either; -use hir::{FieldSource, HasSource, InFile, ModuleSource, Semantics}; +use hir::{FieldSource, HasSource, HirFileIdExt, InFile, ModuleSource, Semantics}; use stdx::never; use syntax::{ ast::{self, HasName}, diff --git a/crates/ide-db/src/search.rs b/crates/ide-db/src/search.rs index 22438a203bd78..68f2ad494570b 100644 --- a/crates/ide-db/src/search.rs +++ b/crates/ide-db/src/search.rs @@ -8,7 +8,8 @@ use std::mem; use base_db::{salsa::Database, FileId, FileRange, SourceDatabase, SourceDatabaseExt}; use hir::{ - AsAssocItem, DefWithBody, HasAttrs, HasSource, InFile, ModuleSource, Semantics, Visibility, + AsAssocItem, DefWithBody, HasAttrs, HasSource, HirFileIdExt, InFile, ModuleSource, Semantics, + Visibility, }; use memchr::memmem::Finder; use nohash_hasher::IntMap; diff --git a/crates/ide-db/src/test_data/test_doc_alias.txt b/crates/ide-db/src/test_data/test_doc_alias.txt index 7834c66033c00..72a6eb5eabd14 100644 --- a/crates/ide-db/src/test_data/test_doc_alias.txt +++ b/crates/ide-db/src/test_data/test_doc_alias.txt @@ -21,9 +21,7 @@ ), loc: DeclarationLocation { hir_file_id: FileId( - FileId( - 0, - ), + 0, ), ptr: SyntaxNodePtr { kind: STRUCT, @@ -50,9 +48,7 @@ ), loc: DeclarationLocation { hir_file_id: FileId( - FileId( - 0, - ), + 0, ), ptr: SyntaxNodePtr { kind: STRUCT, @@ -79,9 +75,7 @@ ), loc: DeclarationLocation { hir_file_id: FileId( - FileId( - 0, - ), + 0, ), ptr: SyntaxNodePtr { kind: STRUCT, @@ -108,9 +102,7 @@ ), loc: DeclarationLocation { hir_file_id: FileId( - FileId( - 0, - ), + 0, ), ptr: SyntaxNodePtr { kind: STRUCT, @@ -137,9 +129,7 @@ ), loc: DeclarationLocation { hir_file_id: FileId( - FileId( - 0, - ), + 0, ), ptr: SyntaxNodePtr { kind: STRUCT, @@ -166,9 +156,7 @@ ), loc: DeclarationLocation { hir_file_id: FileId( - FileId( - 0, - ), + 0, ), ptr: SyntaxNodePtr { kind: STRUCT, @@ -195,9 +183,7 @@ ), loc: DeclarationLocation { hir_file_id: FileId( - 
FileId( - 0, - ), + 0, ), ptr: SyntaxNodePtr { kind: STRUCT, diff --git a/crates/ide-db/src/test_data/test_symbol_index_collection.txt b/crates/ide-db/src/test_data/test_symbol_index_collection.txt index 87ad5844c64cf..375ac5598152f 100644 --- a/crates/ide-db/src/test_data/test_symbol_index_collection.txt +++ b/crates/ide-db/src/test_data/test_symbol_index_collection.txt @@ -19,9 +19,7 @@ ), loc: DeclarationLocation { hir_file_id: FileId( - FileId( - 0, - ), + 0, ), ptr: SyntaxNodePtr { kind: TYPE_ALIAS, @@ -46,9 +44,7 @@ ), loc: DeclarationLocation { hir_file_id: FileId( - FileId( - 0, - ), + 0, ), ptr: SyntaxNodePtr { kind: CONST, @@ -73,9 +69,7 @@ ), loc: DeclarationLocation { hir_file_id: FileId( - FileId( - 0, - ), + 0, ), ptr: SyntaxNodePtr { kind: CONST, @@ -102,9 +96,7 @@ ), loc: DeclarationLocation { hir_file_id: FileId( - FileId( - 0, - ), + 0, ), ptr: SyntaxNodePtr { kind: ENUM, @@ -131,9 +123,7 @@ ), loc: DeclarationLocation { hir_file_id: FileId( - FileId( - 0, - ), + 0, ), ptr: SyntaxNodePtr { kind: USE_TREE, @@ -160,9 +150,7 @@ ), loc: DeclarationLocation { hir_file_id: FileId( - FileId( - 0, - ), + 0, ), ptr: SyntaxNodePtr { kind: MACRO_DEF, @@ -187,9 +175,7 @@ ), loc: DeclarationLocation { hir_file_id: FileId( - FileId( - 0, - ), + 0, ), ptr: SyntaxNodePtr { kind: STATIC, @@ -216,9 +202,7 @@ ), loc: DeclarationLocation { hir_file_id: FileId( - FileId( - 0, - ), + 0, ), ptr: SyntaxNodePtr { kind: STRUCT, @@ -245,11 +229,7 @@ ), loc: DeclarationLocation { hir_file_id: MacroFile( - MacroFile { - macro_call_id: MacroCallId( - 0, - ), - }, + 0, ), ptr: SyntaxNodePtr { kind: STRUCT, @@ -276,9 +256,7 @@ ), loc: DeclarationLocation { hir_file_id: FileId( - FileId( - 0, - ), + 0, ), ptr: SyntaxNodePtr { kind: STRUCT, @@ -307,9 +285,7 @@ ), loc: DeclarationLocation { hir_file_id: FileId( - FileId( - 0, - ), + 0, ), ptr: SyntaxNodePtr { kind: STRUCT, @@ -338,9 +314,7 @@ ), loc: DeclarationLocation { hir_file_id: FileId( - FileId( - 0, - ), + 0, ), ptr: SyntaxNodePtr { kind: STRUCT, @@ -365,9 +339,7 @@ ), loc: DeclarationLocation { hir_file_id: FileId( - FileId( - 0, - ), + 0, ), ptr: SyntaxNodePtr { kind: TRAIT, @@ -394,9 +366,7 @@ ), loc: DeclarationLocation { hir_file_id: FileId( - FileId( - 0, - ), + 0, ), ptr: SyntaxNodePtr { kind: USE_TREE, @@ -423,9 +393,7 @@ ), loc: DeclarationLocation { hir_file_id: FileId( - FileId( - 0, - ), + 0, ), ptr: SyntaxNodePtr { kind: UNION, @@ -452,9 +420,7 @@ ), loc: DeclarationLocation { hir_file_id: FileId( - FileId( - 0, - ), + 0, ), ptr: SyntaxNodePtr { kind: MODULE, @@ -481,9 +447,7 @@ ), loc: DeclarationLocation { hir_file_id: FileId( - FileId( - 0, - ), + 0, ), ptr: SyntaxNodePtr { kind: MODULE, @@ -510,9 +474,7 @@ ), loc: DeclarationLocation { hir_file_id: FileId( - FileId( - 0, - ), + 0, ), ptr: SyntaxNodePtr { kind: MACRO_RULES, @@ -537,9 +499,7 @@ ), loc: DeclarationLocation { hir_file_id: FileId( - FileId( - 0, - ), + 0, ), ptr: SyntaxNodePtr { kind: FN, @@ -566,9 +526,7 @@ ), loc: DeclarationLocation { hir_file_id: FileId( - FileId( - 0, - ), + 0, ), ptr: SyntaxNodePtr { kind: MACRO_RULES, @@ -593,9 +551,7 @@ ), loc: DeclarationLocation { hir_file_id: FileId( - FileId( - 0, - ), + 0, ), ptr: SyntaxNodePtr { kind: FN, @@ -622,9 +578,7 @@ ), loc: DeclarationLocation { hir_file_id: FileId( - FileId( - 0, - ), + 0, ), ptr: SyntaxNodePtr { kind: USE_TREE, @@ -649,9 +603,7 @@ ), loc: DeclarationLocation { hir_file_id: FileId( - FileId( - 0, - ), + 0, ), ptr: SyntaxNodePtr { kind: FN, @@ -691,9 +643,7 @@ ), loc: DeclarationLocation { 
hir_file_id: FileId( - FileId( - 0, - ), + 0, ), ptr: SyntaxNodePtr { kind: STRUCT, @@ -731,9 +681,7 @@ ), loc: DeclarationLocation { hir_file_id: FileId( - FileId( - 1, - ), + 1, ), ptr: SyntaxNodePtr { kind: USE_TREE, @@ -760,9 +708,7 @@ ), loc: DeclarationLocation { hir_file_id: FileId( - FileId( - 1, - ), + 1, ), ptr: SyntaxNodePtr { kind: STRUCT, @@ -789,9 +735,7 @@ ), loc: DeclarationLocation { hir_file_id: FileId( - FileId( - 1, - ), + 1, ), ptr: SyntaxNodePtr { kind: USE_TREE, @@ -818,9 +762,7 @@ ), loc: DeclarationLocation { hir_file_id: FileId( - FileId( - 1, - ), + 1, ), ptr: SyntaxNodePtr { kind: USE_TREE, @@ -847,9 +789,7 @@ ), loc: DeclarationLocation { hir_file_id: FileId( - FileId( - 1, - ), + 1, ), ptr: SyntaxNodePtr { kind: USE_TREE, diff --git a/crates/ide-diagnostics/src/handlers/missing_fields.rs b/crates/ide-diagnostics/src/handlers/missing_fields.rs index d7dca1083a076..cb38bc54d7d61 100644 --- a/crates/ide-diagnostics/src/handlers/missing_fields.rs +++ b/crates/ide-diagnostics/src/handlers/missing_fields.rs @@ -1,7 +1,7 @@ use either::Either; use hir::{ db::{ExpandDatabase, HirDatabase}, - known, AssocItem, HirDisplay, InFile, Type, + known, AssocItem, HirDisplay, HirFileIdExt, InFile, Type, }; use ide_db::{ assists::Assist, famous_defs::FamousDefs, imports::import_assets::item_for_path_search, diff --git a/crates/ide-diagnostics/src/handlers/missing_unsafe.rs b/crates/ide-diagnostics/src/handlers/missing_unsafe.rs index 0f695b2745a8a..f93a35cf181c2 100644 --- a/crates/ide-diagnostics/src/handlers/missing_unsafe.rs +++ b/crates/ide-diagnostics/src/handlers/missing_unsafe.rs @@ -1,4 +1,5 @@ use hir::db::ExpandDatabase; +use hir::HirFileIdExt; use ide_db::{assists::Assist, source_change::SourceChange}; use syntax::{ast, SyntaxNode}; use syntax::{match_ast, AstNode}; diff --git a/crates/ide-diagnostics/src/handlers/no_such_field.rs b/crates/ide-diagnostics/src/handlers/no_such_field.rs index ee8a9c95793c1..0abcbffe72b6b 100644 --- a/crates/ide-diagnostics/src/handlers/no_such_field.rs +++ b/crates/ide-diagnostics/src/handlers/no_such_field.rs @@ -1,5 +1,5 @@ use either::Either; -use hir::{db::ExpandDatabase, HasSource, HirDisplay, Semantics}; +use hir::{db::ExpandDatabase, HasSource, HirDisplay, HirFileIdExt, Semantics}; use ide_db::{base_db::FileId, source_change::SourceChange, RootDatabase}; use syntax::{ ast::{self, edit::IndentLevel, make}, diff --git a/crates/ide-diagnostics/src/handlers/replace_filter_map_next_with_find_map.rs b/crates/ide-diagnostics/src/handlers/replace_filter_map_next_with_find_map.rs index d15233d15c2c5..258ac6cd82338 100644 --- a/crates/ide-diagnostics/src/handlers/replace_filter_map_next_with_find_map.rs +++ b/crates/ide-diagnostics/src/handlers/replace_filter_map_next_with_find_map.rs @@ -1,4 +1,4 @@ -use hir::{db::ExpandDatabase, InFile}; +use hir::{db::ExpandDatabase, HirFileIdExt, InFile}; use ide_db::source_change::SourceChange; use syntax::{ ast::{self, HasArgList}, diff --git a/crates/ide-diagnostics/src/handlers/type_mismatch.rs b/crates/ide-diagnostics/src/handlers/type_mismatch.rs index c92d92ceae8c8..fd00535d0c34f 100644 --- a/crates/ide-diagnostics/src/handlers/type_mismatch.rs +++ b/crates/ide-diagnostics/src/handlers/type_mismatch.rs @@ -1,4 +1,4 @@ -use hir::{db::ExpandDatabase, ClosureStyle, HirDisplay, InFile, Type}; +use hir::{db::ExpandDatabase, ClosureStyle, HirDisplay, HirFileIdExt, InFile, Type}; use ide_db::{famous_defs::FamousDefs, source_change::SourceChange}; use syntax::{ ast::{self, BlockExpr, ExprStmt}, diff 
--git a/crates/ide-diagnostics/src/handlers/unresolved_module.rs b/crates/ide-diagnostics/src/handlers/unresolved_module.rs index be24e50c9871d..bbbd21741e5df 100644 --- a/crates/ide-diagnostics/src/handlers/unresolved_module.rs +++ b/crates/ide-diagnostics/src/handlers/unresolved_module.rs @@ -1,4 +1,4 @@ -use hir::db::ExpandDatabase; +use hir::{db::ExpandDatabase, HirFileIdExt}; use ide_db::{assists::Assist, base_db::AnchoredPathBuf, source_change::FileSystemEdit}; use itertools::Itertools; use syntax::AstNode; diff --git a/crates/ide/src/call_hierarchy.rs b/crates/ide/src/call_hierarchy.rs index f834f2ce59279..6f41f51f80ec5 100644 --- a/crates/ide/src/call_hierarchy.rs +++ b/crates/ide/src/call_hierarchy.rs @@ -149,7 +149,7 @@ mod tests { fn check_hierarchy( ra_fixture: &str, - expected: Expect, + expected_nav: Expect, expected_incoming: Expect, expected_outgoing: Expect, ) { @@ -158,7 +158,7 @@ mod tests { let mut navs = analysis.call_hierarchy(pos).unwrap().unwrap().info; assert_eq!(navs.len(), 1); let nav = navs.pop().unwrap(); - expected.assert_eq(&nav.debug_render()); + expected_nav.assert_eq(&nav.debug_render()); let item_pos = FilePosition { file_id: nav.file_id, offset: nav.focus_or_full_range().start() }; diff --git a/crates/ide/src/expand_macro.rs b/crates/ide/src/expand_macro.rs index 3220774567750..f7c6a0139e0c8 100644 --- a/crates/ide/src/expand_macro.rs +++ b/crates/ide/src/expand_macro.rs @@ -1,4 +1,4 @@ -use hir::Semantics; +use hir::{HirFileIdExt, Semantics}; use ide_db::{ base_db::FileId, helpers::pick_best_token, syntax_helpers::insert_whitespace_into_node::insert_ws_into, RootDatabase, diff --git a/crates/ide/src/goto_definition.rs b/crates/ide/src/goto_definition.rs index e09b9f3914820..59e8578cf154c 100644 --- a/crates/ide/src/goto_definition.rs +++ b/crates/ide/src/goto_definition.rs @@ -60,13 +60,13 @@ pub(crate) fn goto_definition( .into_iter() .filter_map(|token| { let parent = token.parent()?; - if let Some(tt) = ast::TokenTree::cast(parent) { + if let Some(tt) = ast::TokenTree::cast(parent.clone()) { if let Some(x) = try_lookup_include_path(sema, tt, token.clone(), file_id) { return Some(vec![x]); } } Some( - IdentClass::classify_token(sema, &token)? + IdentClass::classify_node(sema, &parent)? .definitions() .into_iter() .flat_map(|def| { @@ -392,6 +392,8 @@ fn bar() { ); } + // FIXME: We should emit two targets here, one for the identifier in the declaration, one for + // the macro call #[test] fn goto_def_for_macro_defined_fn_no_arg() { check( @@ -399,10 +401,10 @@ fn bar() { //- /lib.rs macro_rules! define_fn { () => (fn foo() {}) + //^^^ } define_fn!(); -//^^^^^^^^^^^^^ fn bar() { $0foo(); diff --git a/crates/ide/src/rename.rs b/crates/ide/src/rename.rs index ac9df5ed6d1f0..ad1eb2499718f 100644 --- a/crates/ide/src/rename.rs +++ b/crates/ide/src/rename.rs @@ -4,7 +4,7 @@ //! tests. This module also implements a couple of magic tricks, like renaming //! `self` and to `self` (to switch between associated function and method). 
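
One small ownership detail in the goto_definition hunk above: `ast::TokenTree::cast(parent.clone())` clones because `cast` consumes its argument, while the node is still needed afterwards for the `IdentClass::classify_node` fallback. A toy illustration of that pattern, with simplified stand-in types rather than the real syntax API:

```rust
#[derive(Clone)]
struct SyntaxNode(&'static str);

struct TokenTree(SyntaxNode);

impl TokenTree {
    // consumes the node, like `AstNode::cast` does
    fn cast(node: SyntaxNode) -> Option<TokenTree> {
        (node.0 == "TOKEN_TREE").then(|| TokenTree(node))
    }
}

fn classify_node(node: &SyntaxNode) -> Option<&'static str> {
    (node.0 == "NAME_REF").then_some("definition")
}

fn main() {
    let parent = SyntaxNode("NAME_REF");
    // the first use takes a clone so the fallback below can still borrow `parent`
    if TokenTree::cast(parent.clone()).is_none() {
        assert_eq!(classify_node(&parent), Some("definition"));
    }
}
```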
-use hir::{AsAssocItem, InFile, Semantics}; +use hir::{AsAssocItem, HirFileIdExt, InFile, Semantics}; use ide_db::{ base_db::FileId, defs::{Definition, NameClass, NameRefClass}, diff --git a/crates/ide/src/runnables.rs b/crates/ide/src/runnables.rs index 07cdddd15f82e..954a364c78707 100644 --- a/crates/ide/src/runnables.rs +++ b/crates/ide/src/runnables.rs @@ -2,7 +2,7 @@ use std::fmt; use ast::HasName; use cfg::CfgExpr; -use hir::{db::HirDatabase, AsAssocItem, HasAttrs, HasSource, Semantics}; +use hir::{db::HirDatabase, AsAssocItem, HasAttrs, HasSource, HirFileIdExt, Semantics}; use ide_assists::utils::test_related_attribute; use ide_db::{ base_db::{FilePosition, FileRange}, diff --git a/crates/ide/src/static_index.rs b/crates/ide/src/static_index.rs index aabd26da289ca..7ee50f7a67f8b 100644 --- a/crates/ide/src/static_index.rs +++ b/crates/ide/src/static_index.rs @@ -3,7 +3,7 @@ use std::collections::HashMap; -use hir::{db::HirDatabase, Crate, Module}; +use hir::{db::HirDatabase, Crate, HirFileIdExt, Module}; use ide_db::helpers::get_definition; use ide_db::{ base_db::{FileId, FileRange, SourceDatabaseExt}, diff --git a/crates/ide/src/syntax_highlighting/highlight.rs b/crates/ide/src/syntax_highlighting/highlight.rs index 7d00282fc14bd..1bffab29cf93a 100644 --- a/crates/ide/src/syntax_highlighting/highlight.rs +++ b/crates/ide/src/syntax_highlighting/highlight.rs @@ -1,6 +1,6 @@ //! Computes color for a single element. -use hir::{AsAssocItem, HasVisibility, Semantics}; +use hir::{AsAssocItem, HasVisibility, HirFileIdExt, Semantics}; use ide_db::{ defs::{Definition, IdentClass, NameClass, NameRefClass}, FxHashMap, RootDatabase, SymbolKind, diff --git a/crates/load-cargo/src/lib.rs b/crates/load-cargo/src/lib.rs index 68b592ffaa4de..4d1319094933f 100644 --- a/crates/load-cargo/src/lib.rs +++ b/crates/load-cargo/src/lib.rs @@ -4,13 +4,12 @@ // to run rust-analyzer as a library. 
use std::{collections::hash_map::Entry, mem, path::Path, sync}; -use ::tt::token_id as tt; use crossbeam_channel::{unbounded, Receiver}; use ide::{AnalysisHost, Change, SourceRoot}; use ide_db::{ base_db::{ - CrateGraph, Env, ProcMacro, ProcMacroExpander, ProcMacroExpansionError, ProcMacroKind, - ProcMacroLoadResult, ProcMacros, + span::SpanData, CrateGraph, Env, ProcMacro, ProcMacroExpander, ProcMacroExpansionError, + ProcMacroKind, ProcMacroLoadResult, ProcMacros, }, FxHashMap, }; @@ -374,16 +373,19 @@ struct Expander(proc_macro_api::ProcMacro); impl ProcMacroExpander for Expander { fn expand( &self, - subtree: &tt::Subtree, - attrs: Option<&tt::Subtree>, + subtree: &tt::Subtree, + attrs: Option<&tt::Subtree>, env: &Env, - ) -> Result { - let env = env.iter().map(|(k, v)| (k.to_string(), v.to_string())).collect(); - match self.0.expand(subtree, attrs, env) { - Ok(Ok(subtree)) => Ok(subtree), - Ok(Err(err)) => Err(ProcMacroExpansionError::Panic(err.0)), - Err(err) => Err(ProcMacroExpansionError::System(err.to_string())), - } + ) -> Result, ProcMacroExpansionError> { + let _ = (subtree, attrs, env); + + // let env = env.iter().map(|(k, v)| (k.to_string(), v.to_string())).collect(); + // match self.0.expand(subtree, attrs, env) { + // Ok(Ok(subtree)) => Ok(subtree), + // Ok(Err(err)) => Err(ProcMacroExpansionError::Panic(err.0)), + // Err(err) => Err(ProcMacroExpansionError::System(err.to_string())), + // } + todo!() } } @@ -394,10 +396,10 @@ struct IdentityExpander; impl ProcMacroExpander for IdentityExpander { fn expand( &self, - subtree: &tt::Subtree, - _: Option<&tt::Subtree>, + subtree: &tt::Subtree, + _: Option<&tt::Subtree>, _: &Env, - ) -> Result { + ) -> Result, ProcMacroExpansionError> { Ok(subtree.clone()) } } @@ -409,10 +411,10 @@ struct EmptyExpander; impl ProcMacroExpander for EmptyExpander { fn expand( &self, - _: &tt::Subtree, - _: Option<&tt::Subtree>, + _: &tt::Subtree, + _: Option<&tt::Subtree>, _: &Env, - ) -> Result { + ) -> Result, ProcMacroExpansionError> { Ok(tt::Subtree::empty()) } } diff --git a/crates/mbe/src/benchmark.rs b/crates/mbe/src/benchmark.rs index 19cb20354b74e..4f60e90773ee3 100644 --- a/crates/mbe/src/benchmark.rs +++ b/crates/mbe/src/benchmark.rs @@ -6,13 +6,19 @@ use syntax::{ AstNode, SmolStr, }; use test_utils::{bench, bench_fixture, skip_slow_tests}; -use tt::{Span, TokenId}; +use tt::{Span, SpanData}; use crate::{ parser::{MetaVarKind, Op, RepeatKind, Separator}, syntax_node_to_token_tree, DeclarativeMacro, }; +#[derive(PartialEq, Eq, Clone, Copy, Debug)] +struct DummyFile; +impl Span for DummyFile { + const DUMMY: Self = DummyFile; +} + #[test] fn benchmark_parse_macro_rules() { if skip_slow_tests() { @@ -39,7 +45,7 @@ fn benchmark_expand_macro_rules() { invocations .into_iter() .map(|(id, tt)| { - let res = rules[&id].expand(tt); + let res = rules[&id].expand(&tt); assert!(res.err.is_none()); res.value.token_trees.len() }) @@ -48,14 +54,14 @@ fn benchmark_expand_macro_rules() { assert_eq!(hash, 69413); } -fn macro_rules_fixtures() -> FxHashMap { +fn macro_rules_fixtures() -> FxHashMap>> { macro_rules_fixtures_tt() .into_iter() .map(|(id, tt)| (id, DeclarativeMacro::parse_macro_rules(&tt, true))) .collect() } -fn macro_rules_fixtures_tt() -> FxHashMap> { +fn macro_rules_fixtures_tt() -> FxHashMap>> { let fixture = bench_fixture::numerous_macro_rules(); let source_file = ast::SourceFile::parse(&fixture).ok().unwrap(); @@ -65,7 +71,12 @@ fn macro_rules_fixtures_tt() -> FxHashMap> { .filter_map(ast::MacroRules::cast) .map(|rule| { let id = 
rule.name().unwrap().to_string(); - let (def_tt, _) = syntax_node_to_token_tree(rule.token_tree().unwrap().syntax()); + let def_tt = syntax_node_to_token_tree( + rule.token_tree().unwrap().syntax(), + DummyFile, + 0.into(), + &Default::default(), + ); (id, def_tt) }) .collect() @@ -73,8 +84,8 @@ fn macro_rules_fixtures_tt() -> FxHashMap> { /// Generate random invocation fixtures from rules fn invocation_fixtures( - rules: &FxHashMap, -) -> Vec<(String, tt::Subtree)> { + rules: &FxHashMap>>, +) -> Vec<(String, tt::Subtree>)> { let mut seed = 123456789; let mut res = Vec::new(); @@ -96,8 +107,8 @@ fn invocation_fixtures( loop { let mut subtree = tt::Subtree { delimiter: tt::Delimiter { - open: tt::TokenId::DUMMY, - close: tt::TokenId::DUMMY, + open: SpanData::DUMMY, + close: SpanData::DUMMY, kind: tt::DelimiterKind::Invisible, }, token_trees: vec![], @@ -105,7 +116,7 @@ fn invocation_fixtures( for op in rule.lhs.iter() { collect_from_op(op, &mut subtree, &mut seed); } - if it.expand(subtree.clone()).err.is_none() { + if it.expand(&subtree).err.is_none() { res.push((name.clone(), subtree)); break; } @@ -119,7 +130,11 @@ fn invocation_fixtures( } return res; - fn collect_from_op(op: &Op, parent: &mut tt::Subtree, seed: &mut usize) { + fn collect_from_op( + op: &Op>, + parent: &mut tt::Subtree>, + seed: &mut usize, + ) { return match op { Op::Var { kind, .. } => match kind.as_ref() { Some(MetaVarKind::Ident) => parent.token_trees.push(make_ident("foo")), @@ -205,32 +220,22 @@ fn invocation_fixtures( *seed = usize::wrapping_add(usize::wrapping_mul(*seed, a), c); *seed } - fn make_ident(ident: &str) -> tt::TokenTree { - tt::Leaf::Ident(tt::Ident { span: tt::TokenId::DUMMY, text: SmolStr::new(ident) }) - .into() + fn make_ident(ident: &str) -> tt::TokenTree> { + tt::Leaf::Ident(tt::Ident { span: SpanData::DUMMY, text: SmolStr::new(ident) }).into() } - fn make_punct(char: char) -> tt::TokenTree { - tt::Leaf::Punct(tt::Punct { - span: tt::TokenId::DUMMY, - char, - spacing: tt::Spacing::Alone, - }) - .into() - } - fn make_literal(lit: &str) -> tt::TokenTree { - tt::Leaf::Literal(tt::Literal { span: tt::TokenId::DUMMY, text: SmolStr::new(lit) }) + fn make_punct(char: char) -> tt::TokenTree> { + tt::Leaf::Punct(tt::Punct { span: SpanData::DUMMY, char, spacing: tt::Spacing::Alone }) .into() } + fn make_literal(lit: &str) -> tt::TokenTree> { + tt::Leaf::Literal(tt::Literal { span: SpanData::DUMMY, text: SmolStr::new(lit) }).into() + } fn make_subtree( kind: tt::DelimiterKind, - token_trees: Option>>, - ) -> tt::TokenTree { + token_trees: Option>>>, + ) -> tt::TokenTree> { tt::Subtree { - delimiter: tt::Delimiter { - open: tt::TokenId::DUMMY, - close: tt::TokenId::DUMMY, - kind, - }, + delimiter: tt::Delimiter { open: SpanData::DUMMY, close: SpanData::DUMMY, kind }, token_trees: token_trees.unwrap_or_default(), } .into() diff --git a/crates/mbe/src/lib.rs b/crates/mbe/src/lib.rs index 46599802935e1..43543479eb94a 100644 --- a/crates/mbe/src/lib.rs +++ b/crates/mbe/src/lib.rs @@ -19,7 +19,7 @@ mod benchmark; mod token_map; use stdx::impl_from; -use tt::{Span, TokenId}; +use tt::Span; use std::fmt; @@ -34,10 +34,8 @@ pub use tt::{Delimiter, DelimiterKind, Punct}; pub use crate::{ syntax_bridge::{ - parse_exprs_with_sep, parse_to_token_tree, syntax_node_to_token_map, - syntax_node_to_token_map_with_modifications, syntax_node_to_token_tree, - syntax_node_to_token_tree_with_modifications, token_tree_to_syntax_node, SyntheticToken, - SyntheticTokenId, + map_from_syntax_node, parse_exprs_with_sep, 
parse_to_token_tree, syntax_node_to_token_tree, + syntax_node_to_token_tree_censored, token_tree_to_syntax_node, }, token_map::TokenMap, }; @@ -125,10 +123,8 @@ impl fmt::Display for CountError { /// `tt::TokenTree`, but there's a crucial difference: in macro rules, `$ident` /// and `$()*` have special meaning (see `Var` and `Repeat` data structures) #[derive(Clone, Debug, PartialEq, Eq)] -pub struct DeclarativeMacro { - rules: Box<[Rule]>, - /// Highest id of the token we have in TokenMap - shift: Shift, +pub struct DeclarativeMacro { + rules: Box<[Rule]>, // This is used for correctly determining the behavior of the pat fragment // FIXME: This should be tracked by hygiene of the fragment identifier! is_2021: bool, @@ -141,91 +137,13 @@ struct Rule { rhs: MetaTemplate, } -#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] -pub struct Shift(u32); - -impl Shift { - pub fn new(tt: &tt::Subtree) -> Shift { - // Note that TokenId is started from zero, - // We have to add 1 to prevent duplication. - let value = max_id(tt).map_or(0, |it| it + 1); - return Shift(value); - - // Find the max token id inside a subtree - fn max_id(subtree: &tt::Subtree) -> Option { - let filter = - |tt: &_| match tt { - tt::TokenTree::Subtree(subtree) => { - let tree_id = max_id(subtree); - if subtree.delimiter.open != tt::TokenId::unspecified() { - Some(tree_id.map_or(subtree.delimiter.open.0, |t| { - t.max(subtree.delimiter.open.0) - })) - } else { - tree_id - } - } - tt::TokenTree::Leaf(leaf) => { - let &(tt::Leaf::Ident(tt::Ident { span, .. }) - | tt::Leaf::Punct(tt::Punct { span, .. }) - | tt::Leaf::Literal(tt::Literal { span, .. })) = leaf; - - (span != tt::TokenId::unspecified()).then_some(span.0) - } - }; - subtree.token_trees.iter().filter_map(filter).max() - } - } - - /// Shift given TokenTree token id - pub fn shift_all(self, tt: &mut tt::Subtree) { - for t in &mut tt.token_trees { - match t { - tt::TokenTree::Leaf( - tt::Leaf::Ident(tt::Ident { span, .. }) - | tt::Leaf::Punct(tt::Punct { span, .. }) - | tt::Leaf::Literal(tt::Literal { span, .. }), - ) => *span = self.shift(*span), - tt::TokenTree::Subtree(tt) => { - tt.delimiter.open = self.shift(tt.delimiter.open); - tt.delimiter.close = self.shift(tt.delimiter.close); - self.shift_all(tt) - } - } - } - } - - pub fn shift(self, id: tt::TokenId) -> tt::TokenId { - if id == tt::TokenId::unspecified() { - id - } else { - tt::TokenId(id.0 + self.0) - } - } - - pub fn unshift(self, id: tt::TokenId) -> Option { - id.0.checked_sub(self.0).map(tt::TokenId) - } -} - -#[derive(Copy, Clone, Debug, Eq, PartialEq)] -pub enum Origin { - Def, - Call, -} - -impl DeclarativeMacro { - pub fn from_err(err: ParseError, is_2021: bool) -> DeclarativeMacro { - DeclarativeMacro { - rules: Box::default(), - shift: Shift(0), - is_2021, - err: Some(Box::new(err)), - } +impl DeclarativeMacro { + pub fn from_err(err: ParseError, is_2021: bool) -> DeclarativeMacro { + DeclarativeMacro { rules: Box::default(), is_2021, err: Some(Box::new(err)) } } /// The old, `macro_rules! m {}` flavor. - pub fn parse_macro_rules(tt: &tt::Subtree, is_2021: bool) -> DeclarativeMacro { + pub fn parse_macro_rules(tt: &tt::Subtree, is_2021: bool) -> DeclarativeMacro { // Note: this parsing can be implemented using mbe machinery itself, by // matching against `$($lhs:tt => $rhs:tt);*` pattern, but implementing // manually seems easier. 
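(For orientation, since the hunk above is dense: `DeclarativeMacro` is now generic over the span type carried by its token trees, and the whole `Shift` bookkeeping disappears because spans identify their origin directly. The following is a minimal usage sketch, not part of the patch; it reuses the throwaway `DummyFile` anchor that the benchmark diff above defines, and otherwise only signatures visible in this series:

    use tt::{Span, SpanData};

    #[derive(PartialEq, Eq, Clone, Copy, Debug)]
    struct DummyFile;
    impl Span for DummyFile {
        const DUMMY: Self = DummyFile;
    }

    // Parse a `macro_rules!` definition, then expand one invocation.
    // Both subtrees carry `SpanData<DummyFile>` spans; no token-id
    // shifting or un-shifting is involved anymore. Expansion errors are
    // carried in `ExpandResult::err` and ignored here for brevity.
    fn expand_once(
        def: &tt::Subtree<SpanData<DummyFile>>,
        call: &tt::Subtree<SpanData<DummyFile>>,
    ) -> tt::Subtree<SpanData<DummyFile>> {
        let mac = mbe::DeclarativeMacro::parse_macro_rules(def, true);
        mac.expand(call).value
    }

Compare this with the `map_id_down`/`map_id_up` pair deleted in the next hunk: callers previously had to translate token ids across the expansion boundary, which is exactly the bookkeeping that span-carrying trees make unnecessary.)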
@@ -257,11 +175,11 @@ impl DeclarativeMacro { } } - DeclarativeMacro { rules: rules.into_boxed_slice(), shift: Shift::new(tt), is_2021, err } + DeclarativeMacro { rules: rules.into_boxed_slice(), is_2021, err } } /// The new, unstable `macro m {}` flavor. - pub fn parse_macro2(tt: &tt::Subtree, is_2021: bool) -> DeclarativeMacro { + pub fn parse_macro2(tt: &tt::Subtree, is_2021: bool) -> DeclarativeMacro { let mut src = TtIter::new(tt); let mut rules = Vec::new(); let mut err = None; @@ -308,31 +226,15 @@ impl DeclarativeMacro { } } - DeclarativeMacro { rules: rules.into_boxed_slice(), shift: Shift::new(tt), is_2021, err } - } - - pub fn expand(&self, mut tt: tt::Subtree) -> ExpandResult> { - self.shift.shift_all(&mut tt); - expander::expand_rules(&self.rules, &tt, self.is_2021) + DeclarativeMacro { rules: rules.into_boxed_slice(), is_2021, err } } pub fn err(&self) -> Option<&ParseError> { self.err.as_deref() } - pub fn map_id_down(&self, id: tt::TokenId) -> tt::TokenId { - self.shift.shift(id) - } - - pub fn map_id_up(&self, id: tt::TokenId) -> (tt::TokenId, Origin) { - match self.shift.unshift(id) { - Some(id) => (id, Origin::Call), - None => (id, Origin::Def), - } - } - - pub fn shift(&self) -> Shift { - self.shift + pub fn expand(&self, tt: &tt::Subtree) -> ExpandResult> { + expander::expand_rules(&self.rules, &tt, self.is_2021) } } diff --git a/crates/mbe/src/syntax_bridge.rs b/crates/mbe/src/syntax_bridge.rs index 01aab6b659cb9..c8c2e5dcd55a0 100644 --- a/crates/mbe/src/syntax_bridge.rs +++ b/crates/mbe/src/syntax_bridge.rs @@ -1,16 +1,15 @@ //! Conversions between [`SyntaxNode`] and [`tt::TokenTree`]. -use rustc_hash::FxHashMap; -use stdx::{always, non_empty_vec::NonEmptyVec}; +use stdx::non_empty_vec::NonEmptyVec; use syntax::{ ast::{self, make::tokens::doc_comment}, - AstToken, Parse, PreorderWithTokens, SmolStr, SyntaxElement, SyntaxKind, + AstToken, NodeOrToken, Parse, PreorderWithTokens, SmolStr, SyntaxElement, SyntaxKind, SyntaxKind::*, SyntaxNode, SyntaxToken, SyntaxTreeBuilder, TextRange, TextSize, WalkEvent, T, }; use tt::{ buffer::{Cursor, TokenBuffer}, - TokenId, + Span, SpanData, }; use crate::{to_parser_input::to_parser_input, tt_iter::TtIter, TokenMap}; @@ -20,75 +19,37 @@ mod tests; /// Convert the syntax node to a `TokenTree` (what macro /// will consume). -pub fn syntax_node_to_token_tree(node: &SyntaxNode) -> (tt::Subtree, TokenMap) { - let (subtree, token_map, _) = syntax_node_to_token_tree_with_modifications( - node, - Default::default(), - 0, - Default::default(), - Default::default(), - ); - (subtree, token_map) -} - -/// Convert the syntax node to a `TokenTree` (what macro will consume) -/// with the censored range excluded. -pub fn syntax_node_to_token_tree_with_modifications( +/// `anchor` and `anchor_offset` are used to convert the node's spans +/// to relative spans, relative to the passed anchor. +/// `map` is used to resolve the converted spans accordingly. 
+/// TODO: Flesh out the doc comment more thoroughly +pub fn syntax_node_to_token_tree( node: &SyntaxNode, - existing_token_map: TokenMap, - next_id: u32, - replace: FxHashMap>, - append: FxHashMap>, -) -> (tt::Subtree, TokenMap, u32) { - let global_offset = node.text_range().start(); - let mut c = Converter::new(node, global_offset, existing_token_map, next_id, replace, append); - let subtree = convert_tokens(&mut c); - c.id_alloc.map.shrink_to_fit(); - always!(c.replace.is_empty(), "replace: {:?}", c.replace); - always!(c.append.is_empty(), "append: {:?}", c.append); - (subtree, c.id_alloc.map, c.id_alloc.next_id) -} - -/// Convert the syntax node to a `TokenTree` (what macro -/// will consume). -pub fn syntax_node_to_token_map(node: &SyntaxNode) -> TokenMap { - syntax_node_to_token_map_with_modifications( - node, - Default::default(), - 0, - Default::default(), - Default::default(), - ) - .0 + anchor: SpanAnchor, + anchor_offset: TextSize, + map: &TokenMap>, +) -> tt::Subtree> +where + SpanData: Span, +{ + assert!(anchor_offset <= node.text_range().start()); + let mut c = Converter::new(node, anchor_offset, anchor, vec![], map); + convert_tokens(&mut c) } -/// Convert the syntax node to a `TokenTree` (what macro will consume) -/// with the censored range excluded. -pub fn syntax_node_to_token_map_with_modifications( +pub fn syntax_node_to_token_tree_censored( node: &SyntaxNode, - existing_token_map: TokenMap, - next_id: u32, - replace: FxHashMap>, - append: FxHashMap>, -) -> (TokenMap, u32) { - let global_offset = node.text_range().start(); - let mut c = Converter::new(node, global_offset, existing_token_map, next_id, replace, append); - collect_tokens(&mut c); - c.id_alloc.map.shrink_to_fit(); - always!(c.replace.is_empty(), "replace: {:?}", c.replace); - always!(c.append.is_empty(), "append: {:?}", c.append); - (c.id_alloc.map, c.id_alloc.next_id) -} - -#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] -pub struct SyntheticTokenId(pub u32); - -#[derive(Debug, Clone)] -pub struct SyntheticToken { - pub kind: SyntaxKind, - pub text: SmolStr, - pub range: TextRange, - pub id: SyntheticTokenId, + anchor: SpanAnchor, + anchor_offset: TextSize, + map: &TokenMap>, + censored: Vec, +) -> tt::Subtree> +where + SpanData: Span, +{ + assert!(anchor_offset <= node.text_range().start()); + let mut c = Converter::new(node, anchor_offset, anchor, censored, map); + convert_tokens(&mut c) } // The following items are what `rustc` macro can be parsed into : @@ -103,10 +64,13 @@ pub struct SyntheticToken { // * AssocItems(SmallVec<[ast::AssocItem; 1]>) // * ForeignItems(SmallVec<[ast::ForeignItem; 1]> -pub fn token_tree_to_syntax_node( - tt: &tt::Subtree, +pub fn token_tree_to_syntax_node( + tt: &tt::Subtree>, entry_point: parser::TopEntryPoint, -) -> (Parse, TokenMap) { +) -> (Parse, TokenMap>) +where + SpanData: Span, +{ let buffer = match tt { tt::Subtree { delimiter: tt::Delimiter { kind: tt::DelimiterKind::Invisible, .. 
}, @@ -133,29 +97,40 @@ pub fn token_tree_to_syntax_node( tree_sink.finish() } +pub fn map_from_syntax_node( + node: &SyntaxNode, + anchor: SpanAnchor, + anchor_offset: TextSize, +) -> TokenMap> +where + SpanAnchor: Copy, + SpanData: Span, +{ + let mut map = TokenMap::default(); + node.descendants_with_tokens().filter_map(NodeOrToken::into_token).for_each(|t| { + map.insert(t.text_range(), SpanData { range: t.text_range() - anchor_offset, anchor }); + }); + map +} + /// Convert a string to a `TokenTree` -pub fn parse_to_token_tree(text: &str) -> Option<(tt::Subtree, TokenMap)> { +pub fn parse_to_token_tree( + text: &str, + file_id: SpanAnchor, +) -> Option>> +where + SpanData: Span, +{ let lexed = parser::LexedStr::new(text); if lexed.errors().next().is_some() { return None; } - - let mut conv = RawConverter { - lexed, - pos: 0, - id_alloc: TokenIdAlloc { - map: Default::default(), - global_offset: TextSize::default(), - next_id: 0, - }, - }; - - let subtree = convert_tokens(&mut conv); - Some((subtree, conv.id_alloc.map)) + let mut conv = RawConverter { lexed, pos: 0, _offset: TextSize::default(), file_id }; + Some(convert_tokens(&mut conv)) } /// Split token tree with separate expr: $($e:expr)SEP* -pub fn parse_exprs_with_sep(tt: &tt::Subtree, sep: char) -> Vec> { +pub fn parse_exprs_with_sep(tt: &tt::Subtree, sep: char) -> Vec> { if tt.token_trees.is_empty() { return Vec::new(); } @@ -191,47 +166,33 @@ pub fn parse_exprs_with_sep(tt: &tt::Subtree, sep: char) -> Vec(conv: &mut C) -> tt::Subtree { - struct StackEntry { - subtree: tt::Subtree, - idx: usize, - open_range: TextRange, - } - - let entry = StackEntry { - subtree: tt::Subtree { delimiter: tt::Delimiter::unspecified(), token_trees: vec![] }, - // never used (delimiter is `None`) - idx: !0, - open_range: TextRange::empty(TextSize::of('.')), - }; +fn convert_tokens>( + conv: &mut C, +) -> tt::Subtree> +where + SpanData: Span, + SpanAnchor: Copy, +{ + let entry = tt::Subtree { delimiter: tt::Delimiter::unspecified(), token_trees: vec![] }; let mut stack = NonEmptyVec::new(entry); + let anchor = conv.anchor(); loop { - let StackEntry { subtree, .. } = stack.last_mut(); + let subtree = stack.last_mut(); let result = &mut subtree.token_trees; - let (token, range) = match conv.bump() { - Some(it) => it, - None => break, - }; - let synth_id = token.synthetic_id(conv); + let Some((token, rel_range, abs_range)) = conv.bump() else { break }; let kind = token.kind(conv); if kind == COMMENT { - // Since `convert_doc_comment` can fail, we need to peek the next id, so that we can - // figure out which token id to use for the doc comment, if it is converted successfully. 
- let next_id = conv.id_alloc().peek_next_id(); - if let Some(tokens) = conv.convert_doc_comment(&token, next_id) { - let id = conv.id_alloc().alloc(range, synth_id); - debug_assert_eq!(id, next_id); + if let Some(tokens) = conv.convert_doc_comment( + &token, + conv.span_for(abs_range).unwrap_or(SpanData { range: rel_range, anchor }), + ) { result.extend(tokens); } continue; } let tt = if kind.is_punct() && kind != UNDERSCORE { - if synth_id.is_none() { - assert_eq!(range.len(), TextSize::of('.')); - } - let expected = match subtree.delimiter.kind { tt::DelimiterKind::Parenthesis => Some(T![')']), tt::DelimiterKind::Brace => Some(T!['}']), @@ -241,9 +202,11 @@ fn convert_tokens(conv: &mut C) -> tt::Subtree { if let Some(expected) = expected { if kind == expected { - if let Some(entry) = stack.pop() { - conv.id_alloc().close_delim(entry.idx, Some(range)); - stack.last_mut().subtree.token_trees.push(entry.subtree.into()); + if let Some(mut subtree) = stack.pop() { + subtree.delimiter.close = conv + .span_for(abs_range) + .unwrap_or(SpanData { range: rel_range, anchor }); + stack.last_mut().token_trees.push(subtree.into()); } continue; } @@ -257,12 +220,18 @@ fn convert_tokens(conv: &mut C) -> tt::Subtree { }; if let Some(kind) = delim { - let (id, idx) = conv.id_alloc().open_delim(range, synth_id); let subtree = tt::Subtree { - delimiter: tt::Delimiter { open: id, close: tt::TokenId::UNSPECIFIED, kind }, + delimiter: tt::Delimiter { + // FIXME: Open and close spans + open: conv + .span_for(abs_range) + .unwrap_or(SpanData { range: rel_range, anchor }), + close: Span::DUMMY, + kind, + }, token_trees: vec![], }; - stack.push(StackEntry { subtree, idx, open_range: range }); + stack.push(subtree); continue; } @@ -279,39 +248,43 @@ fn convert_tokens(conv: &mut C) -> tt::Subtree { tt::Leaf::from(tt::Punct { char, spacing, - span: conv.id_alloc().alloc(range, synth_id), + span: conv.span_for(abs_range).unwrap_or(SpanData { range: rel_range, anchor }), }) .into() } else { macro_rules! 
make_leaf { ($i:ident) => { tt::$i { - span: conv.id_alloc().alloc(range, synth_id), + span: conv + .span_for(abs_range) + .unwrap_or(SpanData { range: rel_range, anchor }), text: token.to_text(conv), } .into() }; } - let leaf: tt::Leaf = match kind { + let leaf: tt::Leaf<_> = match kind { T![true] | T![false] => make_leaf!(Ident), IDENT => make_leaf!(Ident), UNDERSCORE => make_leaf!(Ident), k if k.is_keyword() => make_leaf!(Ident), k if k.is_literal() => make_leaf!(Literal), + // FIXME: Check whether span splitting works as intended LIFETIME_IDENT => { let char_unit = TextSize::of('\''); - let r = TextRange::at(range.start(), char_unit); + let r = TextRange::at(rel_range.start(), char_unit); let apostrophe = tt::Leaf::from(tt::Punct { char: '\'', spacing: tt::Spacing::Joint, - span: conv.id_alloc().alloc(r, synth_id), + span: conv.span_for(abs_range).unwrap_or(SpanData { range: r, anchor }), }); result.push(apostrophe.into()); - let r = TextRange::at(range.start() + char_unit, range.len() - char_unit); + let r = + TextRange::at(rel_range.start() + char_unit, rel_range.len() - char_unit); let ident = tt::Leaf::from(tt::Ident { text: SmolStr::new(&token.to_text(conv)[1..]), - span: conv.id_alloc().alloc(r, synth_id), + span: conv.span_for(abs_range).unwrap_or(SpanData { range: r, anchor }), }); result.push(ident.into()); continue; @@ -330,10 +303,9 @@ fn convert_tokens(conv: &mut C) -> tt::Subtree { while let Some(entry) = stack.pop() { let parent = stack.last_mut(); - conv.id_alloc().close_delim(entry.idx, None); - let leaf: tt::Leaf = tt::Punct { - span: conv.id_alloc().alloc(entry.open_range, None), - char: match entry.subtree.delimiter.kind { + let leaf: tt::Leaf<_> = tt::Punct { + span: entry.delimiter.open, + char: match entry.delimiter.kind { tt::DelimiterKind::Parenthesis => '(', tt::DelimiterKind::Brace => '{', tt::DelimiterKind::Bracket => '[', @@ -342,11 +314,11 @@ fn convert_tokens(conv: &mut C) -> tt::Subtree { spacing: tt::Spacing::Alone, } .into(); - parent.subtree.token_trees.push(leaf.into()); - parent.subtree.token_trees.extend(entry.subtree.token_trees); + parent.token_trees.push(leaf.into()); + parent.token_trees.extend(entry.token_trees); } - let subtree = stack.into_last().subtree; + let subtree = stack.into_last(); if let [tt::TokenTree::Subtree(first)] = &*subtree.token_trees { first.clone() } else { @@ -354,111 +326,6 @@ fn convert_tokens(conv: &mut C) -> tt::Subtree { } } -fn collect_tokens(conv: &mut C) { - struct StackEntry { - idx: usize, - open_range: TextRange, - delimiter: tt::DelimiterKind, - } - - let entry = StackEntry { - delimiter: tt::DelimiterKind::Invisible, - // never used (delimiter is `None`) - idx: !0, - open_range: TextRange::empty(TextSize::of('.')), - }; - let mut stack = NonEmptyVec::new(entry); - - loop { - let StackEntry { delimiter, .. } = stack.last_mut(); - let (token, range) = match conv.bump() { - Some(it) => it, - None => break, - }; - let synth_id = token.synthetic_id(conv); - - let kind = token.kind(conv); - if kind == COMMENT { - // Since `convert_doc_comment` can fail, we need to peek the next id, so that we can - // figure out which token id to use for the doc comment, if it is converted successfully. 
- let next_id = conv.id_alloc().peek_next_id(); - if let Some(_tokens) = conv.convert_doc_comment(&token, next_id) { - let id = conv.id_alloc().alloc(range, synth_id); - debug_assert_eq!(id, next_id); - } - continue; - } - if kind.is_punct() && kind != UNDERSCORE { - if synth_id.is_none() { - assert_eq!(range.len(), TextSize::of('.')); - } - - let expected = match delimiter { - tt::DelimiterKind::Parenthesis => Some(T![')']), - tt::DelimiterKind::Brace => Some(T!['}']), - tt::DelimiterKind::Bracket => Some(T![']']), - tt::DelimiterKind::Invisible => None, - }; - - if let Some(expected) = expected { - if kind == expected { - if let Some(entry) = stack.pop() { - conv.id_alloc().close_delim(entry.idx, Some(range)); - } - continue; - } - } - - let delim = match kind { - T!['('] => Some(tt::DelimiterKind::Parenthesis), - T!['{'] => Some(tt::DelimiterKind::Brace), - T!['['] => Some(tt::DelimiterKind::Bracket), - _ => None, - }; - - if let Some(kind) = delim { - let (_id, idx) = conv.id_alloc().open_delim(range, synth_id); - - stack.push(StackEntry { idx, open_range: range, delimiter: kind }); - continue; - } - - conv.id_alloc().alloc(range, synth_id); - } else { - macro_rules! make_leaf { - ($i:ident) => {{ - conv.id_alloc().alloc(range, synth_id); - }}; - } - match kind { - T![true] | T![false] => make_leaf!(Ident), - IDENT => make_leaf!(Ident), - UNDERSCORE => make_leaf!(Ident), - k if k.is_keyword() => make_leaf!(Ident), - k if k.is_literal() => make_leaf!(Literal), - LIFETIME_IDENT => { - let char_unit = TextSize::of('\''); - let r = TextRange::at(range.start(), char_unit); - conv.id_alloc().alloc(r, synth_id); - - let r = TextRange::at(range.start() + char_unit, range.len() - char_unit); - conv.id_alloc().alloc(r, synth_id); - continue; - } - _ => continue, - }; - }; - - // If we get here, we've consumed all input tokens. - // We might have more than one subtree in the stack, if the delimiters are improperly balanced. - // Merge them so we're left with one. 
- while let Some(entry) = stack.pop() { - conv.id_alloc().close_delim(entry.idx, None); - conv.id_alloc().alloc(entry.open_range, None); - } - } -} - fn is_single_token_op(kind: SyntaxKind) -> bool { matches!( kind, @@ -507,112 +374,54 @@ fn doc_comment_text(comment: &ast::Comment) -> SmolStr { text.into() } -fn convert_doc_comment( +fn convert_doc_comment( token: &syntax::SyntaxToken, - span: tt::TokenId, -) -> Option>> { + span: S, +) -> Option>> { cov_mark::hit!(test_meta_doc_comments); let comment = ast::Comment::cast(token.clone())?; let doc = comment.kind().doc?; - // Make `doc="\" Comments\"" - let meta_tkns = - vec![mk_ident("doc", span), mk_punct('=', span), mk_doc_literal(&comment, span)]; - - // Make `#![]` - let mut token_trees = Vec::with_capacity(3); - token_trees.push(mk_punct('#', span)); - if let ast::CommentPlacement::Inner = doc { - token_trees.push(mk_punct('!', span)); - } - token_trees.push(tt::TokenTree::from(tt::Subtree { - delimiter: tt::Delimiter { open: span, close: span, kind: tt::DelimiterKind::Bracket }, - token_trees: meta_tkns, - })); - - return Some(token_trees); + let mk_ident = + |s: &str| tt::TokenTree::from(tt::Leaf::from(tt::Ident { text: s.into(), span })); - // Helper functions - fn mk_ident(s: &str, span: tt::TokenId) -> tt::TokenTree { - tt::TokenTree::from(tt::Leaf::from(tt::Ident { text: s.into(), span })) - } - - fn mk_punct(c: char, span: tt::TokenId) -> tt::TokenTree { + let mk_punct = |c: char| { tt::TokenTree::from(tt::Leaf::from(tt::Punct { char: c, spacing: tt::Spacing::Alone, span, })) - } + }; - fn mk_doc_literal(comment: &ast::Comment, span: tt::TokenId) -> tt::TokenTree { + let mk_doc_literal = |comment: &ast::Comment| { let lit = tt::Literal { text: doc_comment_text(comment), span }; tt::TokenTree::from(tt::Leaf::from(lit)) - } -} - -struct TokenIdAlloc { - map: TokenMap, - global_offset: TextSize, - next_id: u32, -} - -impl TokenIdAlloc { - fn alloc( - &mut self, - absolute_range: TextRange, - synthetic_id: Option, - ) -> tt::TokenId { - let relative_range = absolute_range - self.global_offset; - let token_id = tt::TokenId(self.next_id); - self.next_id += 1; - self.map.insert(token_id, relative_range); - if let Some(id) = synthetic_id { - self.map.insert_synthetic(token_id, id); - } - token_id - } + }; - fn open_delim( - &mut self, - open_abs_range: TextRange, - synthetic_id: Option, - ) -> (tt::TokenId, usize) { - let token_id = tt::TokenId(self.next_id); - self.next_id += 1; - let idx = self.map.insert_delim( - token_id, - open_abs_range - self.global_offset, - open_abs_range - self.global_offset, - ); - if let Some(id) = synthetic_id { - self.map.insert_synthetic(token_id, id); - } - (token_id, idx) - } + // Make `doc="\" Comments\"" + let meta_tkns = vec![mk_ident("doc"), mk_punct('='), mk_doc_literal(&comment)]; - fn close_delim(&mut self, idx: usize, close_abs_range: Option) { - match close_abs_range { - None => { - self.map.remove_delim(idx); - } - Some(close) => { - self.map.update_close_delim(idx, close - self.global_offset); - } - } + // Make `#![]` + let mut token_trees = Vec::with_capacity(3); + token_trees.push(mk_punct('#')); + if let ast::CommentPlacement::Inner = doc { + token_trees.push(mk_punct('!')); } + token_trees.push(tt::TokenTree::from(tt::Subtree { + delimiter: tt::Delimiter { open: span, close: span, kind: tt::DelimiterKind::Bracket }, + token_trees: meta_tkns, + })); - fn peek_next_id(&self) -> tt::TokenId { - tt::TokenId(self.next_id) - } + Some(token_trees) } /// A raw token (straight from lexer) 
converter -struct RawConverter<'a> { +struct RawConverter<'a, SpanAnchor> { lexed: parser::LexedStr<'a>, pos: usize, - id_alloc: TokenIdAlloc, + _offset: TextSize, + file_id: SpanAnchor, } trait SrcToken: std::fmt::Debug { @@ -621,66 +430,64 @@ trait SrcToken: std::fmt::Debug { fn to_char(&self, ctx: &Ctx) -> Option; fn to_text(&self, ctx: &Ctx) -> SmolStr; - - fn synthetic_id(&self, ctx: &Ctx) -> Option; } -trait TokenConverter: Sized { +trait TokenConverter: Sized { type Token: SrcToken; fn convert_doc_comment( &self, token: &Self::Token, - span: tt::TokenId, - ) -> Option>>; + span: SpanData, + ) -> Option>>>; - fn bump(&mut self) -> Option<(Self::Token, TextRange)>; + fn bump(&mut self) -> Option<(Self::Token, TextRange, TextRange)>; fn peek(&self) -> Option; - fn id_alloc(&mut self) -> &mut TokenIdAlloc; + fn anchor(&self) -> SpanAnchor; + fn span_for(&self, range: TextRange) -> Option>; } -impl SrcToken> for usize { - fn kind(&self, ctx: &RawConverter<'_>) -> SyntaxKind { +impl SrcToken> for usize { + fn kind(&self, ctx: &RawConverter<'_, SpanAnchor>) -> SyntaxKind { ctx.lexed.kind(*self) } - fn to_char(&self, ctx: &RawConverter<'_>) -> Option { + fn to_char(&self, ctx: &RawConverter<'_, SpanAnchor>) -> Option { ctx.lexed.text(*self).chars().next() } - fn to_text(&self, ctx: &RawConverter<'_>) -> SmolStr { + fn to_text(&self, ctx: &RawConverter<'_, SpanAnchor>) -> SmolStr { ctx.lexed.text(*self).into() } - - fn synthetic_id(&self, _ctx: &RawConverter<'_>) -> Option { - None - } } -impl TokenConverter for RawConverter<'_> { +impl TokenConverter for RawConverter<'_, SpanAnchor> +where + SpanData: Span, +{ type Token = usize; fn convert_doc_comment( &self, &token: &usize, - span: tt::TokenId, - ) -> Option>> { + span: SpanData, + ) -> Option>>> { let text = self.lexed.text(token); convert_doc_comment(&doc_comment(text), span) } - fn bump(&mut self) -> Option<(Self::Token, TextRange)> { + fn bump(&mut self) -> Option<(Self::Token, TextRange, TextRange)> { if self.pos == self.lexed.len() { return None; } let token = self.pos; self.pos += 1; let range = self.lexed.text_range(token); - let range = TextRange::new(range.start.try_into().unwrap(), range.end.try_into().unwrap()); + let range = TextRange::new(range.start.try_into().ok()?, range.end.try_into().ok()?); - Some((token, range)) + Some((token, range, range)) } fn peek(&self) -> Option { @@ -690,77 +497,60 @@ impl TokenConverter for RawConverter<'_> { Some(self.pos) } - fn id_alloc(&mut self) -> &mut TokenIdAlloc { - &mut self.id_alloc + fn anchor(&self) -> SpanAnchor { + self.file_id + } + fn span_for(&self, _: TextRange) -> Option> { + None } } -struct Converter { - id_alloc: TokenIdAlloc, +struct Converter<'a, SpanAnchor> { current: Option, - current_synthetic: Vec, preorder: PreorderWithTokens, - replace: FxHashMap>, - append: FxHashMap>, range: TextRange, punct_offset: Option<(SyntaxToken, TextSize)>, + /// Used to make the emitted text ranges in the spans relative to the span anchor. 
+ offset: TextSize, + file_id: SpanAnchor, + map: &'a TokenMap>, + censored: Vec, } -impl Converter { +impl<'a, SpanAnchor> Converter<'a, SpanAnchor> { fn new( node: &SyntaxNode, - global_offset: TextSize, - existing_token_map: TokenMap, - next_id: u32, - mut replace: FxHashMap>, - mut append: FxHashMap>, - ) -> Converter { + anchor_offset: TextSize, + file_id: SpanAnchor, + censored: Vec, + map: &'a TokenMap>, + ) -> Converter<'a, SpanAnchor> { let range = node.text_range(); let mut preorder = node.preorder_with_tokens(); - let (first, synthetic) = Self::next_token(&mut preorder, &mut replace, &mut append); + let first = Self::next_token(&mut preorder, &censored); Converter { - id_alloc: { TokenIdAlloc { map: existing_token_map, global_offset, next_id } }, current: first, - current_synthetic: synthetic, preorder, range, - replace, - append, punct_offset: None, + offset: anchor_offset, + file_id, + censored, + map, } } - fn next_token( - preorder: &mut PreorderWithTokens, - replace: &mut FxHashMap>, - append: &mut FxHashMap>, - ) -> (Option, Vec) { + fn next_token(preorder: &mut PreorderWithTokens, censor: &[SyntaxNode]) -> Option { while let Some(ev) = preorder.next() { - let ele = match ev { - WalkEvent::Enter(ele) => ele, - WalkEvent::Leave(ele) => { - if let Some(mut v) = append.remove(&ele) { - if !v.is_empty() { - v.reverse(); - return (None, v); - } - } - continue; + match ev { + WalkEvent::Enter(SyntaxElement::Token(t)) => return Some(t), + WalkEvent::Enter(SyntaxElement::Node(n)) if censor.contains(&n) => { + preorder.skip_subtree() } - }; - if let Some(mut v) = replace.remove(&ele) { - preorder.skip_subtree(); - if !v.is_empty() { - v.reverse(); - return (None, v); - } - } - match ele { - SyntaxElement::Token(t) => return (Some(t), Vec::new()), - _ => {} + _ => (), } } - (None, Vec::new()) + None } } @@ -768,100 +558,79 @@ impl Converter { enum SynToken { Ordinary(SyntaxToken), // FIXME is this supposed to be `Punct`? - Punch(SyntaxToken, TextSize), - Synthetic(SyntheticToken), + Punct(SyntaxToken, usize), } impl SynToken { - fn token(&self) -> Option<&SyntaxToken> { + fn token(&self) -> &SyntaxToken { match self { - SynToken::Ordinary(it) | SynToken::Punch(it, _) => Some(it), - SynToken::Synthetic(_) => None, + SynToken::Ordinary(it) | SynToken::Punct(it, _) => it, } } } -impl SrcToken for SynToken { - fn kind(&self, ctx: &Converter) -> SyntaxKind { +impl SrcToken> for SynToken { + fn kind(&self, ctx: &Converter<'_, SpanAnchor>) -> SyntaxKind { match self { SynToken::Ordinary(token) => token.kind(), - SynToken::Punch(..) => SyntaxKind::from_char(self.to_char(ctx).unwrap()).unwrap(), - SynToken::Synthetic(token) => token.kind, + SynToken::Punct(..) 
=> SyntaxKind::from_char(self.to_char(ctx).unwrap()).unwrap(), } } - fn to_char(&self, _ctx: &Converter) -> Option { + fn to_char(&self, _ctx: &Converter<'_, SpanAnchor>) -> Option { match self { SynToken::Ordinary(_) => None, - SynToken::Punch(it, i) => it.text().chars().nth((*i).into()), - SynToken::Synthetic(token) if token.text.len() == 1 => token.text.chars().next(), - SynToken::Synthetic(_) => None, - } - } - fn to_text(&self, _ctx: &Converter) -> SmolStr { - match self { - SynToken::Ordinary(token) => token.text().into(), - SynToken::Punch(token, _) => token.text().into(), - SynToken::Synthetic(token) => token.text.clone(), + SynToken::Punct(it, i) => it.text().chars().nth(*i), } } - - fn synthetic_id(&self, _ctx: &Converter) -> Option { + fn to_text(&self, _ctx: &Converter<'_, SpanAnchor>) -> SmolStr { match self { - SynToken::Synthetic(token) => Some(token.id), - _ => None, + SynToken::Ordinary(token) | SynToken::Punct(token, _) => token.text().into(), } } } -impl TokenConverter for Converter { +impl TokenConverter for Converter<'_, SpanAnchor> +where + SpanData: Span, +{ type Token = SynToken; fn convert_doc_comment( &self, token: &Self::Token, - span: tt::TokenId, - ) -> Option>> { - convert_doc_comment(token.token()?, span) + span: SpanData, + ) -> Option>>> { + convert_doc_comment(token.token(), span) } - fn bump(&mut self) -> Option<(Self::Token, TextRange)> { + fn bump(&mut self) -> Option<(Self::Token, TextRange, TextRange)> { if let Some((punct, offset)) = self.punct_offset.clone() { if usize::from(offset) + 1 < punct.text().len() { let offset = offset + TextSize::of('.'); let range = punct.text_range(); self.punct_offset = Some((punct.clone(), offset)); let range = TextRange::at(range.start() + offset, TextSize::of('.')); - return Some((SynToken::Punch(punct, offset), range)); + return Some(( + SynToken::Punct(punct, u32::from(offset) as usize), + range - self.offset, + range, + )); } } - if let Some(synth_token) = self.current_synthetic.pop() { - if self.current_synthetic.is_empty() { - let (new_current, new_synth) = - Self::next_token(&mut self.preorder, &mut self.replace, &mut self.append); - self.current = new_current; - self.current_synthetic = new_synth; - } - let range = synth_token.range; - return Some((SynToken::Synthetic(synth_token), range)); - } - let curr = self.current.clone()?; if !self.range.contains_range(curr.text_range()) { return None; } - let (new_current, new_synth) = - Self::next_token(&mut self.preorder, &mut self.replace, &mut self.append); - self.current = new_current; - self.current_synthetic = new_synth; + self.current = Self::next_token(&mut self.preorder, &self.censored); let token = if curr.kind().is_punct() { self.punct_offset = Some((curr.clone(), 0.into())); let range = curr.text_range(); let range = TextRange::at(range.start(), TextSize::of('.')); - (SynToken::Punch(curr, 0.into()), range) + (SynToken::Punct(curr, 0 as usize), range - self.offset, range) } else { self.punct_offset = None; let range = curr.text_range(); - (SynToken::Ordinary(curr), range) + (SynToken::Ordinary(curr), range - self.offset, range) }; Some(token) @@ -871,54 +640,54 @@ impl TokenConverter for Converter { if let Some((punct, mut offset)) = self.punct_offset.clone() { offset += TextSize::of('.'); if usize::from(offset) < punct.text().len() { - return Some(SynToken::Punch(punct, offset)); + return Some(SynToken::Punct(punct, usize::from(offset))); } } - if let Some(synth_token) = self.current_synthetic.last() { - return 
Some(SynToken::Synthetic(synth_token.clone())); - } - let curr = self.current.clone()?; if !self.range.contains_range(curr.text_range()) { return None; } let token = if curr.kind().is_punct() { - SynToken::Punch(curr, 0.into()) + SynToken::Punct(curr, 0 as usize) } else { SynToken::Ordinary(curr) }; Some(token) } - fn id_alloc(&mut self) -> &mut TokenIdAlloc { - &mut self.id_alloc + fn anchor(&self) -> SpanAnchor { + self.file_id + } + fn span_for(&self, range: TextRange) -> Option> { + self.map.span_for_range(range) } } -struct TtTreeSink<'a> { +struct TtTreeSink<'a, SpanAnchor> { buf: String, - cursor: Cursor<'a, TokenId>, - open_delims: FxHashMap, + cursor: Cursor<'a, SpanData>, text_pos: TextSize, inner: SyntaxTreeBuilder, - token_map: TokenMap, + token_map: TokenMap>, } -impl<'a> TtTreeSink<'a> { - fn new(cursor: Cursor<'a, TokenId>) -> Self { +impl<'a, SpanAnchor> TtTreeSink<'a, SpanAnchor> +where + SpanData: Span, +{ + fn new(cursor: Cursor<'a, SpanData>) -> Self { TtTreeSink { buf: String::new(), cursor, - open_delims: FxHashMap::default(), text_pos: 0.into(), inner: SyntaxTreeBuilder::default(), token_map: TokenMap::default(), } } - fn finish(mut self) -> (Parse, TokenMap) { + fn finish(mut self) -> (Parse, TokenMap>) { self.token_map.shrink_to_fit(); (self.inner.finish(), self.token_map) } @@ -936,7 +705,10 @@ fn delim_to_str(d: tt::DelimiterKind, closing: bool) -> Option<&'static str> { Some(&texts[idx..texts.len() - (1 - idx)]) } -impl TtTreeSink<'_> { +impl TtTreeSink<'_, SpanAnchor> +where + SpanData: Span, +{ /// Parses a float literal as if it was a one to two name ref nodes with a dot inbetween. /// This occurs when a float literal is used as a field access. fn float_split(&mut self, has_pseudo_dot: bool) { @@ -991,7 +763,7 @@ impl TtTreeSink<'_> { break match self.cursor.token_tree() { Some(tt::buffer::TokenTreeRef::Leaf(leaf, _)) => { // Mark the range if needed - let (text, id) = match leaf { + let (text, span) = match leaf { tt::Leaf::Ident(ident) => (ident.text.as_str(), ident.span), tt::Leaf::Punct(punct) => { assert!(punct.char.is_ascii()); @@ -1004,7 +776,7 @@ impl TtTreeSink<'_> { tt::Leaf::Literal(lit) => (lit.text.as_str(), lit.span), }; let range = TextRange::at(self.text_pos, TextSize::of(text)); - self.token_map.insert(id, range); + self.token_map.insert(range, span); self.cursor = self.cursor.bump(); text } @@ -1012,7 +784,8 @@ impl TtTreeSink<'_> { self.cursor = self.cursor.subtree().unwrap(); match delim_to_str(subtree.delimiter.kind, false) { Some(it) => { - self.open_delims.insert(subtree.delimiter.open, self.text_pos); + let range = TextRange::at(self.text_pos, TextSize::of(it)); + self.token_map.insert(range, subtree.delimiter.open); it } None => continue, @@ -1023,18 +796,8 @@ impl TtTreeSink<'_> { self.cursor = self.cursor.bump(); match delim_to_str(parent.delimiter.kind, true) { Some(it) => { - if let Some(open_delim) = - self.open_delims.get(&parent.delimiter.open) - { - let open_range = TextRange::at(*open_delim, TextSize::of('(')); - let close_range = - TextRange::at(self.text_pos, TextSize::of('(')); - self.token_map.insert_delim( - parent.delimiter.open, - open_range, - close_range, - ); - } + let range = TextRange::at(self.text_pos, TextSize::of(it)); + self.token_map.insert(range, parent.delimiter.close); it } None => continue, diff --git a/crates/mbe/src/syntax_bridge/tests.rs b/crates/mbe/src/syntax_bridge/tests.rs index fa0125f3e9e04..32dfb4d7e0b39 100644 --- a/crates/mbe/src/syntax_bridge/tests.rs +++ 
b/crates/mbe/src/syntax_bridge/tests.rs @@ -4,24 +4,32 @@ use syntax::{ast, AstNode}; use test_utils::extract_annotations; use tt::{ buffer::{TokenBuffer, TokenTreeRef}, - Leaf, Punct, Spacing, + Leaf, Punct, Spacing, Span, }; +use crate::syntax_bridge::SpanData; + use super::syntax_node_to_token_tree; fn check_punct_spacing(fixture: &str) { + #[derive(PartialEq, Eq, Clone, Copy, Debug)] + struct DummyFile; + impl Span for DummyFile { + const DUMMY: Self = DummyFile; + } + let source_file = ast::SourceFile::parse(fixture).ok().unwrap(); - let (subtree, token_map) = syntax_node_to_token_tree(source_file.syntax()); + let subtree = + syntax_node_to_token_tree(source_file.syntax(), DummyFile, 0.into(), &Default::default()); let mut annotations: HashMap<_, _> = extract_annotations(fixture) .into_iter() .map(|(range, annotation)| { - let token = token_map.token_by_range(range).expect("no token found"); let spacing = match annotation.as_str() { "Alone" => Spacing::Alone, "Joint" => Spacing::Joint, a => panic!("unknown annotation: {a}"), }; - (token, spacing) + (range, spacing) }) .collect(); @@ -29,8 +37,12 @@ fn check_punct_spacing(fixture: &str) { let mut cursor = buf.begin(); while !cursor.eof() { while let Some(token_tree) = cursor.token_tree() { - if let TokenTreeRef::Leaf(Leaf::Punct(Punct { spacing, span, .. }), _) = token_tree { - if let Some(expected) = annotations.remove(span) { + if let TokenTreeRef::Leaf( + Leaf::Punct(Punct { spacing, span: SpanData { range, .. }, .. }), + _, + ) = token_tree + { + if let Some(expected) = annotations.remove(range) { assert_eq!(expected, *spacing); } } diff --git a/crates/mbe/src/token_map.rs b/crates/mbe/src/token_map.rs index 73a27df5dbca6..1af50c8b3b950 100644 --- a/crates/mbe/src/token_map.rs +++ b/crates/mbe/src/token_map.rs @@ -2,123 +2,121 @@ use std::hash::Hash; -use parser::{SyntaxKind, T}; -use syntax::{TextRange, TextSize}; - -use crate::syntax_bridge::SyntheticTokenId; - -#[derive(Debug, PartialEq, Eq, Clone, Copy, Hash)] -enum TokenTextRange { - Token(TextRange), - Delimiter(TextRange), +use syntax::TextRange; +use tt::Span; + +// pub type HirFile = u32; +// pub type FileRange = (HirFile, TextRange); +// Option, LocalSyntaxContet +// pub type SyntaxContext = (); +// pub type LocalSyntaxContext = u32; + +/// Maps absolute text ranges for the corresponding file to the relevant span data. +#[derive(Debug, PartialEq, Eq, Clone, Hash)] +// FIXME: Rename to SpanMap +pub struct TokenMap { + // FIXME: This needs to be sorted by (FileId, AstId) + // Then we can do a binary search on the file id, + // then a bin search on the ast id + pub span_map: Vec<(TextRange, S)>, + // span_map2: rustc_hash::FxHashMap, } -impl TokenTextRange { - fn by_kind(self, kind: SyntaxKind) -> Option { - match self { - TokenTextRange::Token(it) => Some(it), - TokenTextRange::Delimiter(it) => match kind { - T!['{'] | T!['('] | T!['['] => Some(TextRange::at(it.start(), 1.into())), - T!['}'] | T![')'] | T![']'] => { - Some(TextRange::at(it.end() - TextSize::of('}'), 1.into())) - } - _ => None, - }, - } +impl Default for TokenMap { + fn default() -> Self { + Self { span_map: Vec::new() } } } -/// Maps `tt::TokenId` to the relative range of the original token. -#[derive(Debug, PartialEq, Eq, Clone, Default, Hash)] -pub struct TokenMap { - /// Maps `tt::TokenId` to the *relative* source range. 
- entries: Vec<(tt::TokenId, TokenTextRange)>, - pub synthetic_entries: Vec<(tt::TokenId, SyntheticTokenId)>, -} - -impl TokenMap { - pub fn token_by_range(&self, relative_range: TextRange) -> Option { - let &(token_id, _) = self.entries.iter().find(|(_, range)| match range { - TokenTextRange::Token(it) => *it == relative_range, - TokenTextRange::Delimiter(it) => { - let open = TextRange::at(it.start(), 1.into()); - let close = TextRange::at(it.end() - TextSize::of('}'), 1.into()); - open == relative_range || close == relative_range - } - })?; - Some(token_id) - } - - pub fn ranges_by_token( - &self, - token_id: tt::TokenId, - kind: SyntaxKind, - ) -> impl Iterator + '_ { - self.entries - .iter() - .filter(move |&&(tid, _)| tid == token_id) - .filter_map(move |(_, range)| range.by_kind(kind)) - } - - pub fn synthetic_token_id(&self, token_id: tt::TokenId) -> Option { - self.synthetic_entries.iter().find(|(tid, _)| *tid == token_id).map(|(_, id)| *id) - } - - pub fn first_range_by_token( - &self, - token_id: tt::TokenId, - kind: SyntaxKind, - ) -> Option { - self.ranges_by_token(token_id, kind).next() - } - +impl TokenMap { pub(crate) fn shrink_to_fit(&mut self) { - self.entries.shrink_to_fit(); - self.synthetic_entries.shrink_to_fit(); - } - - pub(crate) fn insert(&mut self, token_id: tt::TokenId, relative_range: TextRange) { - self.entries.push((token_id, TokenTextRange::Token(relative_range))); - } - - pub(crate) fn insert_synthetic(&mut self, token_id: tt::TokenId, id: SyntheticTokenId) { - self.synthetic_entries.push((token_id, id)); + self.span_map.shrink_to_fit(); } - pub(crate) fn insert_delim( - &mut self, - token_id: tt::TokenId, - open_relative_range: TextRange, - close_relative_range: TextRange, - ) -> usize { - let res = self.entries.len(); - let cover = open_relative_range.cover(close_relative_range); - - self.entries.push((token_id, TokenTextRange::Delimiter(cover))); - res + pub(crate) fn insert(&mut self, range: TextRange, span: S) { + self.span_map.push((range, span)); } - pub(crate) fn update_close_delim(&mut self, idx: usize, close_relative_range: TextRange) { - let (_, token_text_range) = &mut self.entries[idx]; - if let TokenTextRange::Delimiter(dim) = token_text_range { - let cover = dim.cover(close_relative_range); - *token_text_range = TokenTextRange::Delimiter(cover); - } - } - - pub(crate) fn remove_delim(&mut self, idx: usize) { - // FIXME: This could be accidentally quadratic - self.entries.remove(idx); + pub fn ranges_with_span(&self, span: S) -> impl Iterator + '_ { + self.span_map.iter().filter_map( + move |(range, s)| { + if s == &span { + Some(*range) + } else { + None + } + }, + ) } - pub fn entries(&self) -> impl Iterator + '_ { - self.entries.iter().filter_map(|&(tid, tr)| match tr { - TokenTextRange::Token(range) => Some((tid, range)), - TokenTextRange::Delimiter(_) => None, - }) + pub fn span_for_range(&self, range: TextRange) -> Option { + self.span_map.iter().find_map(|(r, s)| if r == &range { Some(s.clone()) } else { None }) } - pub fn filter(&mut self, id: impl Fn(tt::TokenId) -> bool) { - self.entries.retain(|&(tid, _)| id(tid)); - } + // pub fn ranges_by_token( + // &self, + // token_id: tt::TokenId, + // kind: SyntaxKind, + // ) -> impl Iterator + '_ { + // self.entries + // .iter() + // .filter(move |&&(tid, _)| tid == token_id) + // .filter_map(move |(_, range)| range.by_kind(kind)) + // } + + // pub(crate) fn remove_delim(&mut self, idx: usize) { + // // FIXME: This could be accidentally quadratic + // self.entries.remove(idx); + // } + + // 
pub fn entries(&self) -> impl Iterator + '_ { + // self.entries.iter().filter_map(|&(tid, tr)| match tr { + // TokenTextRange::Token(range) => Some((tid, range)), + // TokenTextRange::Delimiter(_) => None, + // }) + // } + + // pub fn filter(&mut self, id: impl Fn(tt::TokenId) -> bool) { + // self.entries.retain(|&(tid, _)| id(tid)); + // } + // pub fn synthetic_token_id(&self, token_id: tt::TokenId) -> Option { + // self.synthetic_entries.iter().find(|(tid, _)| *tid == token_id).map(|(_, id)| *id) + // } + + // pub fn first_range_by_token( + // &self, + // token_id: tt::TokenId, + // kind: SyntaxKind, + // ) -> Option { + // self.ranges_by_token(token_id, kind).next() + // } + + // pub(crate) fn insert(&mut self, token_id: tt::TokenId, relative_range: TextRange) { + // self.entries.push((token_id, TokenTextRange::Token(relative_range))); + // } + + // pub(crate) fn insert_synthetic(&mut self, token_id: tt::TokenId, id: SyntheticTokenId) { + // self.synthetic_entries.push((token_id, id)); + // } + + // pub(crate) fn insert_delim( + // &mut self, + // token_id: tt::TokenId, + // open_relative_range: TextRange, + // close_relative_range: TextRange, + // ) -> usize { + // let res = self.entries.len(); + // let cover = open_relative_range.cover(close_relative_range); + + // self.entries.push((token_id, TokenTextRange::Delimiter(cover))); + // res + // } + + // pub(crate) fn update_close_delim(&mut self, idx: usize, close_relative_range: TextRange) { + // let (_, token_text_range) = &mut self.entries[idx]; + // if let TokenTextRange::Delimiter(dim) = token_text_range { + // let cover = dim.cover(close_relative_range); + // *token_text_range = TokenTextRange::Delimiter(cover); + // } + // } } diff --git a/crates/proc-macro-api/Cargo.toml b/crates/proc-macro-api/Cargo.toml index 2c2d2e8a94525..4c87c89adde97 100644 --- a/crates/proc-macro-api/Cargo.toml +++ b/crates/proc-macro-api/Cargo.toml @@ -31,5 +31,6 @@ paths.workspace = true tt.workspace = true stdx.workspace = true profile.workspace = true +text-size.workspace = true # Intentionally *not* depend on anything salsa-related # base-db.workspace = true diff --git a/crates/proc-macro-api/src/msg/flat.rs b/crates/proc-macro-api/src/msg/flat.rs index fe82b8d045420..bfb3213a25d96 100644 --- a/crates/proc-macro-api/src/msg/flat.rs +++ b/crates/proc-macro-api/src/msg/flat.rs @@ -38,6 +38,7 @@ use std::collections::{HashMap, VecDeque}; use serde::{Deserialize, Serialize}; +use text_size::TextRange; use tt::Span; use crate::msg::{ENCODE_CLOSE_SPAN_VERSION, VARIABLE_SIZED_SPANS}; @@ -55,6 +56,19 @@ impl SerializableSpan<1> for tt::TokenId { } } +impl SerializableSpan<3> for tt::SpanData +where + FileId: From + Into, + Self: Span, +{ + fn into_u32(self) -> [u32; 3] { + [self.anchor.into(), self.range.start().into(), self.range.end().into()] + } + fn from_u32([file_id, start, end]: [u32; 3]) -> Self { + tt::SpanData { anchor: file_id.into(), range: TextRange::new(start.into(), end.into()) } + } +} + #[derive(Serialize, Deserialize, Debug)] pub struct FlatTree { subtree: Vec, diff --git a/crates/rust-analyzer/src/cargo_target_spec.rs b/crates/rust-analyzer/src/cargo_target_spec.rs index c7b84c41b33e9..b6dcc26de6210 100644 --- a/crates/rust-analyzer/src/cargo_target_spec.rs +++ b/crates/rust-analyzer/src/cargo_target_spec.rs @@ -209,17 +209,24 @@ mod tests { use super::*; use cfg::CfgExpr; + use hir::HirFileId; + use ide_db::base_db::span::{SpanAnchor, ROOT_ERASED_FILE_AST_ID}; use mbe::syntax_node_to_token_tree; use syntax::{ ast::{self, AstNode}, - 
SmolStr, + SmolStr, TextSize, }; fn check(cfg: &str, expected_features: &[&str]) { let cfg_expr = { let source_file = ast::SourceFile::parse(cfg).ok().unwrap(); let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap(); - let (tt, _) = syntax_node_to_token_tree(tt.syntax()); + let tt = syntax_node_to_token_tree( + tt.syntax(), + SpanAnchor { file_id: HirFileId::from(0), ast_id: ROOT_ERASED_FILE_AST_ID }, + TextSize::new(0), + &Default::default(), + ); CfgExpr::parse(&tt) }; diff --git a/crates/rust-analyzer/src/cli/analysis_stats.rs b/crates/rust-analyzer/src/cli/analysis_stats.rs index 0f6539f224d79..7e795cf463233 100644 --- a/crates/rust-analyzer/src/cli/analysis_stats.rs +++ b/crates/rust-analyzer/src/cli/analysis_stats.rs @@ -8,7 +8,7 @@ use std::{ use hir::{ db::{DefDatabase, ExpandDatabase, HirDatabase}, - Adt, AssocItem, Crate, DefWithBody, HasSource, HirDisplay, ModuleDef, Name, + Adt, AssocItem, Crate, DefWithBody, HasSource, HirDisplay, HirFileIdExt, ModuleDef, Name, }; use hir_def::{ body::{BodySourceMap, SyntheticSyntax}, diff --git a/crates/rust-analyzer/src/cli/diagnostics.rs b/crates/rust-analyzer/src/cli/diagnostics.rs index 8541be715a938..abec2679464ea 100644 --- a/crates/rust-analyzer/src/cli/diagnostics.rs +++ b/crates/rust-analyzer/src/cli/diagnostics.rs @@ -4,7 +4,7 @@ use project_model::{CargoConfig, RustLibSource}; use rustc_hash::FxHashSet; -use hir::{db::HirDatabase, Crate, Module}; +use hir::{db::HirDatabase, Crate, HirFileIdExt, Module}; use ide::{AssistResolveStrategy, DiagnosticsConfig, Severity}; use ide_db::base_db::SourceDatabaseExt; use load_cargo::{load_workspace_at, LoadCargoConfig, ProcMacroServerChoice}; diff --git a/crates/syntax/src/lib.rs b/crates/syntax/src/lib.rs index 4939ab39049c0..1d7b7de390e4b 100644 --- a/crates/syntax/src/lib.rs +++ b/crates/syntax/src/lib.rs @@ -75,7 +75,7 @@ pub use smol_str::SmolStr; #[derive(Debug, PartialEq, Eq)] pub struct Parse { green: GreenNode, - errors: Arc<[SyntaxError]>, + errors: Option>, _ty: PhantomData T>, } @@ -87,14 +87,18 @@ impl Clone for Parse { impl Parse { fn new(green: GreenNode, errors: Vec) -> Parse { - Parse { green, errors: errors.into(), _ty: PhantomData } + Parse { + green, + errors: if errors.is_empty() { None } else { Some(errors.into()) }, + _ty: PhantomData, + } } pub fn syntax_node(&self) -> SyntaxNode { SyntaxNode::new_root(self.green.clone()) } pub fn errors(&self) -> &[SyntaxError] { - &self.errors + self.errors.as_deref().unwrap_or_default() } } @@ -108,10 +112,9 @@ impl Parse { } pub fn ok(self) -> Result> { - if self.errors.is_empty() { - Ok(self.tree()) - } else { - Err(self.errors) + match self.errors { + Some(e) => Err(e), + None => Ok(self.tree()), } } } @@ -129,7 +132,7 @@ impl Parse { impl Parse { pub fn debug_dump(&self) -> String { let mut buf = format!("{:#?}", self.tree().syntax()); - for err in self.errors.iter() { + for err in self.errors.as_deref().into_iter().flat_map(<[_]>::iter) { format_to!(buf, "error {:?}: {}\n", err.range(), err); } buf @@ -141,13 +144,16 @@ impl Parse { fn incremental_reparse(&self, indel: &Indel) -> Option> { // FIXME: validation errors are not handled here - parsing::incremental_reparse(self.tree().syntax(), indel, self.errors.to_vec()).map( - |(green_node, errors, _reparsed_range)| Parse { - green: green_node, - errors: errors.into(), - _ty: PhantomData, - }, + parsing::incremental_reparse( + self.tree().syntax(), + indel, + self.errors.as_deref().unwrap_or_default().iter().cloned(), ) + .map(|(green_node, errors, 
_reparsed_range)| Parse { + green: green_node, + errors: if errors.is_empty() { None } else { Some(errors.into()) }, + _ty: PhantomData, + }) } fn full_reparse(&self, indel: &Indel) -> Parse { @@ -168,7 +174,11 @@ impl SourceFile { errors.extend(validation::validate(&root)); assert_eq!(root.kind(), SyntaxKind::SOURCE_FILE); - Parse { green, errors: errors.into(), _ty: PhantomData } + Parse { + green, + errors: if errors.is_empty() { None } else { Some(errors.into()) }, + _ty: PhantomData, + } } } @@ -275,7 +285,11 @@ impl ast::TokenTree { let (green, errors) = builder.finish_raw(); - Parse { green, errors: errors.into(), _ty: PhantomData } + Parse { + green, + errors: if errors.is_empty() { None } else { Some(errors.into()) }, + _ty: PhantomData, + } } } diff --git a/crates/syntax/src/parsing/reparsing.rs b/crates/syntax/src/parsing/reparsing.rs index 45e5916098282..0ddc641711fb9 100644 --- a/crates/syntax/src/parsing/reparsing.rs +++ b/crates/syntax/src/parsing/reparsing.rs @@ -20,7 +20,7 @@ use crate::{ pub(crate) fn incremental_reparse( node: &SyntaxNode, edit: &Indel, - errors: Vec, + errors: impl IntoIterator, ) -> Option<(GreenNode, Vec, TextRange)> { if let Some((green, new_errors, old_range)) = reparse_token(node, edit) { return Some((green, merge_errors(errors, new_errors, old_range, edit), old_range)); @@ -147,7 +147,7 @@ fn is_balanced(lexed: &parser::LexedStr<'_>) -> bool { } fn merge_errors( - old_errors: Vec, + old_errors: impl IntoIterator, new_errors: Vec, range_before_reparse: TextRange, edit: &Indel, @@ -191,8 +191,12 @@ mod tests { let fully_reparsed = SourceFile::parse(&after); let incrementally_reparsed: Parse = { let before = SourceFile::parse(&before); - let (green, new_errors, range) = - incremental_reparse(before.tree().syntax(), &edit, before.errors.to_vec()).unwrap(); + let (green, new_errors, range) = incremental_reparse( + before.tree().syntax(), + &edit, + before.errors.as_deref().unwrap_or_default().iter().cloned(), + ) + .unwrap(); assert_eq!(range.len(), reparsed_len.into(), "reparsed fragment has wrong length"); Parse::new(green, new_errors) }; diff --git a/crates/syntax/src/tests.rs b/crates/syntax/src/tests.rs index 3010d77d827e1..8ae1242cf7fd4 100644 --- a/crates/syntax/src/tests.rs +++ b/crates/syntax/src/tests.rs @@ -38,7 +38,7 @@ fn benchmark_parser() { let tree = { let _b = bench("parsing"); let p = SourceFile::parse(&data); - assert!(p.errors.is_empty()); + assert!(p.errors.is_none()); assert_eq!(p.tree().syntax.text_range().len(), 352474.into()); p.tree() }; diff --git a/crates/tt/Cargo.toml b/crates/tt/Cargo.toml index a28ee5f1ca2bb..57222449790ec 100644 --- a/crates/tt/Cargo.toml +++ b/crates/tt/Cargo.toml @@ -13,5 +13,6 @@ doctest = false [dependencies] smol_str.workspace = true +text-size.workspace = true stdx.workspace = true diff --git a/crates/tt/src/lib.rs b/crates/tt/src/lib.rs index a4ffc328f2171..89cb12d2c2670 100644 --- a/crates/tt/src/lib.rs +++ b/crates/tt/src/lib.rs @@ -7,6 +7,7 @@ use std::fmt; use stdx::impl_from; +use text_size::{TextRange, TextSize}; pub use smol_str::SmolStr; @@ -31,36 +32,25 @@ impl TokenId { Self::UNSPECIFIED } } - -pub mod token_id { - pub use crate::{DelimiterKind, Spacing, TokenId}; - pub type Span = crate::TokenId; - pub type Subtree = crate::Subtree; - pub type Punct = crate::Punct; - pub type Delimiter = crate::Delimiter; - pub type Leaf = crate::Leaf; - pub type Ident = crate::Ident; - pub type Literal = crate::Literal; - pub type TokenTree = crate::TokenTree; - pub mod buffer { - pub type 
TokenBuffer<'a> = crate::buffer::TokenBuffer<'a, super::Span>; - pub type Cursor<'a> = crate::buffer::Cursor<'a, super::Span>; - pub type TokenTreeRef<'a> = crate::buffer::TokenTreeRef<'a, super::Span>; - } +impl Span for TokenId { + const DUMMY: Self = TokenId(!0); } -pub trait Span: std::fmt::Debug + Copy + Sized { - const DUMMY: Self; - fn is_dummy(&self) -> bool; +#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)] +pub struct SpanData { + /// The text range of this span, relative to the anchor. + pub range: TextRange, + pub anchor: Anchor, } -impl Span for TokenId { - const DUMMY: Self = TokenId(!0); - fn is_dummy(&self) -> bool { - *self == Self::DUMMY - } +impl Span for SpanData { + const DUMMY: Self = + SpanData { range: TextRange::empty(TextSize::new(0)), anchor: Anchor::DUMMY }; } +pub trait Span: std::fmt::Debug + Copy + Sized + Eq { + const DUMMY: Self; +} #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] pub struct SyntaxContext(pub u32); @@ -134,7 +124,6 @@ impl Delimiter { } } - #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] pub enum DelimiterKind { Parenthesis, From e36b3f7b8cabcf34b89a2be14eff474815476590 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Fri, 6 Oct 2023 14:47:11 +0200 Subject: [PATCH 05/80] Proper span representation with syntax context --- crates/base-db/src/span.rs | 16 +- crates/cfg/src/tests.rs | 37 +- crates/hir-def/src/attr/tests.rs | 16 +- crates/hir-expand/src/attrs.rs | 2 +- crates/hir-expand/src/builtin_fn_macro.rs | 3 +- crates/hir-expand/src/db.rs | 26 +- crates/hir-expand/src/hygiene.rs | 242 +++-------- crates/hir-expand/src/lib.rs | 40 +- crates/hir-expand/src/quote.rs | 4 +- crates/mbe/src/benchmark.rs | 36 +- crates/mbe/src/syntax_bridge.rs | 376 +++++++++--------- crates/mbe/src/syntax_bridge/tests.rs | 14 +- crates/proc-macro-api/src/msg.rs | 7 +- crates/proc-macro-api/src/msg/flat.rs | 31 +- crates/rust-analyzer/src/cargo_target_spec.rs | 4 +- crates/tt/src/lib.rs | 28 +- 16 files changed, 413 insertions(+), 469 deletions(-) diff --git a/crates/base-db/src/span.rs b/crates/base-db/src/span.rs index 1072d937a3bd2..acc1e5243f731 100644 --- a/crates/base-db/src/span.rs +++ b/crates/base-db/src/span.rs @@ -1,6 +1,7 @@ use std::fmt; use salsa::InternId; +use tt::SyntaxContext; use vfs::FileId; pub type ErasedFileAstId = la_arena::Idx; @@ -9,10 +10,17 @@ pub type ErasedFileAstId = la_arena::Idx; pub const ROOT_ERASED_FILE_AST_ID: ErasedFileAstId = la_arena::Idx::from_raw(la_arena::RawIdx::from_u32(0)); -#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)] -pub struct SyntaxContext; +pub type SpanData = tt::SpanData; -pub type SpanData = tt::SpanData; +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub struct SyntaxContextId(InternId); +crate::impl_intern_key!(SyntaxContextId); + +impl SyntaxContext for SyntaxContextId { + // FIXME: This is very much UB, salsa exposes no way to create an InternId in a const context + // currently (which kind of makes sense but we need it here!) 
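+    // A sketch of the assumption the transmute leans on (an assumption about
+    // salsa's internals, not something this patch guarantees): `InternId`
+    // must be layout-compatible with a `NonZeroU32`, roughly
+    //
+    //     struct InternId { value: std::num::NonZeroU32 }
+    //
+    // so bit-casting the non-zero constant `1` yields a valid bit pattern.
+    // A hypothetical `const fn` constructor on salsa's side would make the
+    // unsafe block unnecessary.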
+ const DUMMY: Self = SyntaxContextId(unsafe { core::mem::transmute(1) }); +} #[derive(Copy, Clone, PartialEq, Eq, Hash)] pub struct SpanAnchor { @@ -26,7 +34,7 @@ impl fmt::Debug for SpanAnchor { } } -impl tt::Span for SpanAnchor { +impl tt::SpanAnchor for SpanAnchor { const DUMMY: Self = SpanAnchor { file_id: HirFileId(0), ast_id: ROOT_ERASED_FILE_AST_ID }; } diff --git a/crates/cfg/src/tests.rs b/crates/cfg/src/tests.rs index 242929c006a5b..0ea176858c944 100644 --- a/crates/cfg/src/tests.rs +++ b/crates/cfg/src/tests.rs @@ -2,20 +2,30 @@ use arbitrary::{Arbitrary, Unstructured}; use expect_test::{expect, Expect}; use mbe::syntax_node_to_token_tree; use syntax::{ast, AstNode}; -use tt::Span; +use tt::{SpanAnchor, SyntaxContext}; use crate::{CfgAtom, CfgExpr, CfgOptions, DnfExpr}; #[derive(Debug, Copy, Clone, PartialEq, Eq)] struct DummyFile; -impl Span for DummyFile { +impl SpanAnchor for DummyFile { const DUMMY: Self = DummyFile; } +#[derive(Debug, Copy, Clone, PartialEq, Eq)] +struct DummyCtx; +impl SyntaxContext for DummyCtx { + const DUMMY: Self = DummyCtx; +} fn assert_parse_result(input: &str, expected: CfgExpr) { let source_file = ast::SourceFile::parse(input).ok().unwrap(); let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap(); - let tt = syntax_node_to_token_tree(tt.syntax(), DummyFile, 0.into(), &Default::default()); + let tt = syntax_node_to_token_tree::<_, DummyCtx>( + tt.syntax(), + DummyFile, + 0.into(), + &Default::default(), + ); let cfg = CfgExpr::parse(&tt); assert_eq!(cfg, expected); } @@ -23,7 +33,12 @@ fn assert_parse_result(input: &str, expected: CfgExpr) { fn check_dnf(input: &str, expect: Expect) { let source_file = ast::SourceFile::parse(input).ok().unwrap(); let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap(); - let tt = syntax_node_to_token_tree(tt.syntax(), DummyFile, 0.into(), &Default::default()); + let tt = syntax_node_to_token_tree::<_, DummyCtx>( + tt.syntax(), + DummyFile, + 0.into(), + &Default::default(), + ); let cfg = CfgExpr::parse(&tt); let actual = format!("#![cfg({})]", DnfExpr::new(cfg)); expect.assert_eq(&actual); @@ -32,7 +47,12 @@ fn check_dnf(input: &str, expect: Expect) { fn check_why_inactive(input: &str, opts: &CfgOptions, expect: Expect) { let source_file = ast::SourceFile::parse(input).ok().unwrap(); let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap(); - let tt = syntax_node_to_token_tree(tt.syntax(), DummyFile, 0.into(), &Default::default()); + let tt = syntax_node_to_token_tree::<_, DummyCtx>( + tt.syntax(), + DummyFile, + 0.into(), + &Default::default(), + ); let cfg = CfgExpr::parse(&tt); let dnf = DnfExpr::new(cfg); let why_inactive = dnf.why_inactive(opts).unwrap().to_string(); @@ -43,7 +63,12 @@ fn check_why_inactive(input: &str, opts: &CfgOptions, expect: Expect) { fn check_enable_hints(input: &str, opts: &CfgOptions, expected_hints: &[&str]) { let source_file = ast::SourceFile::parse(input).ok().unwrap(); let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap(); - let tt = syntax_node_to_token_tree(tt.syntax(), DummyFile, 0.into(), &Default::default()); + let tt = syntax_node_to_token_tree::<_, DummyCtx>( + tt.syntax(), + DummyFile, + 0.into(), + &Default::default(), + ); let cfg = CfgExpr::parse(&tt); let dnf = DnfExpr::new(cfg); let hints = dnf.compute_enable_hints(opts).map(|diff| diff.to_string()).collect::>(); diff --git a/crates/hir-def/src/attr/tests.rs b/crates/hir-def/src/attr/tests.rs index 
ad101e9bdf776..60e5cebd3cb06 100644 --- a/crates/hir-def/src/attr/tests.rs +++ b/crates/hir-def/src/attr/tests.rs @@ -4,15 +4,25 @@ use base_db::span::SpanAnchor; use mbe::syntax_node_to_token_tree; use syntax::{ast, AstNode}; -use tt::Span; +use tt::{SpanAnchor as _, SyntaxContext}; use crate::attr::{DocAtom, DocExpr}; +#[derive(Copy, Clone, PartialEq, Eq, Debug)] +struct DummyCtx; +impl SyntaxContext for DummyCtx { + const DUMMY: Self = DummyCtx; +} + fn assert_parse_result(input: &str, expected: DocExpr) { let source_file = ast::SourceFile::parse(input).ok().unwrap(); let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap(); - let tt = - syntax_node_to_token_tree(tt.syntax(), SpanAnchor::DUMMY, 0.into(), &Default::default()); + let tt = syntax_node_to_token_tree::<_, DummyCtx>( + tt.syntax(), + SpanAnchor::DUMMY, + 0.into(), + &Default::default(), + ); let cfg = DocExpr::parse(&tt); assert_eq!(cfg, expected); } diff --git a/crates/hir-expand/src/attrs.rs b/crates/hir-expand/src/attrs.rs index 9652dd345a8d1..01a66cd03ab1d 100644 --- a/crates/hir-expand/src/attrs.rs +++ b/crates/hir-expand/src/attrs.rs @@ -1,7 +1,7 @@ //! A higher level attributes based on TokenTree, with also some shortcuts. use std::{fmt, ops}; -use ::tt::Span; +use ::tt::SpanAnchor as _; use base_db::{span::SpanAnchor, CrateId}; use cfg::CfgExpr; use either::Either; diff --git a/crates/hir-expand/src/builtin_fn_macro.rs b/crates/hir-expand/src/builtin_fn_macro.rs index adbe49473aceb..2a541a36735b6 100644 --- a/crates/hir-expand/src/builtin_fn_macro.rs +++ b/crates/hir-expand/src/builtin_fn_macro.rs @@ -1,5 +1,6 @@ //! Builtin macro +use ::tt::Span; use base_db::{ span::{SpanAnchor, ROOT_ERASED_FILE_AST_ID}, AnchoredPath, Edition, FileId, @@ -15,7 +16,7 @@ use syntax::{ use crate::{ db::ExpandDatabase, name, quote, - tt::{self, Span}, + tt::{self}, EagerCallInfo, ExpandError, ExpandResult, HirFileIdExt, MacroCallId, MacroCallLoc, }; diff --git a/crates/hir-expand/src/db.rs b/crates/hir-expand/src/db.rs index 32ba7b2f91155..1a68653a6fcd1 100644 --- a/crates/hir-expand/src/db.rs +++ b/crates/hir-expand/src/db.rs @@ -1,8 +1,9 @@ //! Defines database & queries for macro expansion. +use ::tt::SyntaxContext; use base_db::{ salsa, - span::{SpanAnchor, ROOT_ERASED_FILE_AST_ID}, + span::{SpanAnchor, SyntaxContextId, ROOT_ERASED_FILE_AST_ID}, CrateId, Edition, SourceDatabase, }; use either::Either; @@ -15,11 +16,13 @@ use syntax::{ use triomphe::Arc; use crate::{ - ast_id_map::AstIdMap, builtin_attr_macro::pseudo_derive_attr_expansion, - builtin_fn_macro::EagerExpander, hygiene::HygieneFrame, tt, AstId, BuiltinAttrExpander, - BuiltinDeriveExpander, BuiltinFnLikeExpander, EagerCallInfo, ExpandError, ExpandResult, - ExpandTo, HirFileId, HirFileIdRepr, MacroCallId, MacroCallKind, MacroCallLoc, MacroDefId, - MacroDefKind, MacroFile, ProcMacroExpander, SpanMap, SyntaxContext, SyntaxContextId, + ast_id_map::AstIdMap, + builtin_attr_macro::pseudo_derive_attr_expansion, + builtin_fn_macro::EagerExpander, + hygiene::{self, HygieneFrame, SyntaxContextData}, + tt, AstId, BuiltinAttrExpander, BuiltinDeriveExpander, BuiltinFnLikeExpander, EagerCallInfo, + ExpandError, ExpandResult, ExpandTo, HirFileId, HirFileIdRepr, MacroCallId, MacroCallKind, + MacroCallLoc, MacroDefId, MacroDefKind, MacroFile, ProcMacroExpander, SpanMap, }; /// Total limit on the number of tokens produced by any macro invocation. 
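/// A sketch of how such a budget is typically enforced with the workspace's
/// `limit` crate (the constant value and the exact call site below are
/// illustrative assumptions, they are not shown in this hunk):
///
///     static TOKEN_LIMIT: Limit = Limit::new(1_048_576);
///
///     fn check_tt_count(tt: &tt::Subtree) -> Result<(), ()> {
///         // reject expansions whose token count exceeds the budget
///         TOKEN_LIMIT.check(tt.count())
///     }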
@@ -89,7 +92,15 @@ pub trait ExpandDatabase: SourceDatabase { #[salsa::interned] fn intern_macro_call(&self, macro_call: MacroCallLoc) -> MacroCallId; #[salsa::interned] - fn intern_syntax_context(&self, ctx: SyntaxContext) -> SyntaxContextId; + fn intern_syntax_context(&self, ctx: SyntaxContextData) -> SyntaxContextId; + #[salsa::transparent] + #[salsa::invoke(hygiene::apply_mark)] + fn apply_mark( + &self, + ctxt: SyntaxContextData, + file_id: HirFileId, + transparency: hygiene::Transparency, + ) -> SyntaxContextId; /// Lowers syntactic macro call to a token tree representation. That's a firewall /// query, only typing in the macro call itself changes the returned @@ -225,6 +236,7 @@ pub fn expand_speculative( .ranges_with_span(tt::SpanData { range: token_to_map.text_range(), anchor: SpanAnchor { file_id, ast_id: ROOT_ERASED_FILE_AST_ID }, + ctx: SyntaxContextId::DUMMY, }) .filter_map(|range| syntax_node.covering_element(range).into_token()) .min_by_key(|t| { diff --git a/crates/hir-expand/src/hygiene.rs b/crates/hir-expand/src/hygiene.rs index ce421d3dcd825..e0688178ffb92 100644 --- a/crates/hir-expand/src/hygiene.rs +++ b/crates/hir-expand/src/hygiene.rs @@ -2,71 +2,92 @@ //! //! Specifically, `ast` + `Hygiene` allows you to create a `Name`. Note that, at //! this moment, this is horribly incomplete and handles only `$crate`. -use base_db::CrateId; -use db::TokenExpander; +use base_db::{span::SyntaxContextId, CrateId}; use either::Either; use syntax::{ - ast::{self, HasDocComments}, - AstNode, SyntaxNode, TextRange, TextSize, + ast::{self}, + TextRange, }; use triomphe::Arc; use crate::{ - db::{self, ExpandDatabase}, + db::ExpandDatabase, name::{AsName, Name}, - HirFileId, InFile, MacroCallKind, MacroCallLoc, MacroDefKind, MacroFile, SpanMap, + HirFileId, InFile, }; -#[derive(Clone, Debug)] -pub struct Hygiene { - frames: Option, +#[derive(Debug, Clone, Hash, PartialEq, Eq)] +pub struct SyntaxContextData { + // FIXME: This might only need to be Option? + outer_expn: HirFileId, + outer_transparency: Transparency, + parent: SyntaxContextId, + /// This context, but with all transparent and semi-transparent expansions filtered away. + opaque: SyntaxContextId, + /// This context, but with all transparent expansions filtered away. + opaque_and_semitransparent: SyntaxContextId, + /// Name of the crate to which `$crate` with this context would resolve. + dollar_crate_name: Name, +} + +/// A property of a macro expansion that determines how identifiers +/// produced by that expansion are resolved. +#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Hash, Debug)] +pub enum Transparency { + /// Identifier produced by a transparent expansion is always resolved at call-site. + /// Call-site spans in procedural macros, hygiene opt-out in `macro` should use this. + Transparent, + /// Identifier produced by a semi-transparent expansion may be resolved + /// either at call-site or at definition-site. + /// If it's a local variable, label or `$crate` then it's resolved at def-site. + /// Otherwise it's resolved at call-site. + /// `macro_rules` macros behave like this, built-in macros currently behave like this too, + /// but that's an implementation detail. + SemiTransparent, + /// Identifier produced by an opaque expansion is always resolved at definition-site. + /// Def-site spans in procedural macros, identifiers from `macro` by default use this. 
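+    /// As a rough intuition for def-site resolution (an illustration, not
+    /// part of this patch): locals introduced by an expansion carry the
+    /// expansion's fresh context, so the call site cannot see them:
+    ///
+    ///     macro_rules! make_x { () => { let x = 1; } }
+    ///     make_x!();
+    ///     // `x` is not in scope here: as a local it resolved at def-site.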
+ Opaque, +} + +pub(super) fn apply_mark( + _db: &dyn ExpandDatabase, + _ctxt: SyntaxContextData, + _file_id: HirFileId, + _transparency: Transparency, +) -> SyntaxContextId { + _db.intern_syntax_context(_ctxt) } +// pub(super) fn with_ctxt_from_mark(db: &ExpandDatabase, file_id: HirFileId) { +// self.with_ctxt_from_mark(expn_id, Transparency::Transparent) +// } +// pub(super) fn with_call_site_ctxt(db: &ExpandDatabase, file_id: HirFileId) { +// self.with_ctxt_from_mark(expn_id, Transparency::Transparent) +// } + +#[derive(Clone, Debug)] +pub struct Hygiene {} + impl Hygiene { - pub fn new(db: &dyn ExpandDatabase, file_id: HirFileId) -> Hygiene { - Hygiene { frames: Some(HygieneFrames::new(db, file_id)) } + pub fn new(_: &dyn ExpandDatabase, _: HirFileId) -> Hygiene { + Hygiene {} } pub fn new_unhygienic() -> Hygiene { - Hygiene { frames: None } + Hygiene {} } // FIXME: this should just return name pub fn name_ref_to_name( &self, - db: &dyn ExpandDatabase, + _: &dyn ExpandDatabase, name_ref: ast::NameRef, ) -> Either { - if let Some(frames) = &self.frames { - if name_ref.text() == "$crate" { - if let Some(krate) = frames.root_crate(db, name_ref.syntax()) { - return Either::Right(krate); - } - } - } - Either::Left(name_ref.as_name()) } - pub fn local_inner_macros(&self, _db: &dyn ExpandDatabase, path: ast::Path) -> Option { - let mut _token = path.syntax().first_token()?.text_range(); - let frames = self.frames.as_ref()?; - let mut _current = &frames.0; - - // FIXME: Hygiene ... - return None; - // loop { - // let (mapped, origin) = current.expansion.as_ref()?.map_ident_up(db, token)?; - // if origin == Origin::Def { - // return if current.local_inner { - // frames.root_crate(db, path.syntax()) - // } else { - // None - // }; - // } - // current = current.call_site.as_ref()?; - // token = mapped.value; - // } + pub fn local_inner_macros(&self, _: &dyn ExpandDatabase, _: ast::Path) -> Option { + None } } @@ -74,150 +95,19 @@ impl Hygiene { struct HygieneFrames(Arc); #[derive(Clone, Debug, Eq, PartialEq)] -pub struct HygieneFrame { - expansion: Option, - - // Indicate this is a local inner macro - local_inner: bool, - krate: Option, - - call_site: Option>, - def_site: Option>, -} - -impl HygieneFrames { - fn new(db: &dyn ExpandDatabase, file_id: HirFileId) -> Self { - // Note that this intentionally avoids the `hygiene_frame` query to avoid blowing up memory - // usage. The query is only helpful for nested `HygieneFrame`s as it avoids redundant work. - HygieneFrames(Arc::new(HygieneFrame::new(db, file_id))) - } - - fn root_crate(&self, _db: &dyn ExpandDatabase, node: &SyntaxNode) -> Option { - let mut _token = node.first_token()?.text_range(); - let mut _result = self.0.krate; - let mut _current = self.0.clone(); - - return None; - - // while let Some((mapped, origin)) = - // current.expansion.as_ref().and_then(|it| it.map_ident_up(db, token)) - // { - // result = current.krate; - - // let site = match origin { - // Origin::Def => ¤t.def_site, - // Origin::Call => ¤t.call_site, - // }; - - // let site = match site { - // None => break, - // Some(it) => it, - // }; - - // current = site.clone(); - // token = mapped.value; - // } - - // result - } -} +pub struct HygieneFrame {} #[derive(Debug, Clone, PartialEq, Eq)] -struct HygieneInfo { - file: MacroFile, - /// The start offset of the `macro_rules!` arguments or attribute input. 
- attr_input_or_mac_def_start: Option>, - - macro_def: TokenExpander, - macro_arg: Arc, - exp_map: Arc, -} +struct HygieneInfo {} impl HygieneInfo { - fn _map_ident_up( - &self, - _db: &dyn ExpandDatabase, - _token: TextRange, - ) -> Option> { - // self.exp_map.token_by_range(token).map(|span| InFile::new(span.anchor, span.range)) + fn _map_ident_up(&self, _: &dyn ExpandDatabase, _: TextRange) -> Option> { None } } -fn make_hygiene_info( - db: &dyn ExpandDatabase, - macro_file: MacroFile, - loc: &MacroCallLoc, -) -> HygieneInfo { - let def = loc.def.ast_id().left().and_then(|id| { - let def_tt = match id.to_node(db) { - ast::Macro::MacroRules(mac) => mac.token_tree()?, - ast::Macro::MacroDef(mac) => mac.body()?, - }; - Some(InFile::new(id.file_id, def_tt)) - }); - let attr_input_or_mac_def = def.or_else(|| match loc.kind { - MacroCallKind::Attr { ast_id, invoc_attr_index, .. } => { - let tt = ast_id - .to_node(db) - .doc_comments_and_attrs() - .nth(invoc_attr_index.ast_index()) - .and_then(Either::left)? - .token_tree()?; - Some(InFile::new(ast_id.file_id, tt)) - } - _ => None, - }); - - let macro_def = db.macro_expander(loc.def); - let (_, exp_map) = db.parse_macro_expansion(macro_file).value; - let macro_arg = db.macro_arg(macro_file.macro_call_id).value.unwrap_or_else(|| { - Arc::new(tt::Subtree { delimiter: tt::Delimiter::UNSPECIFIED, token_trees: Vec::new() }) - }); - - HygieneInfo { - file: macro_file, - attr_input_or_mac_def_start: attr_input_or_mac_def - .map(|it| it.map(|tt| tt.syntax().text_range().start())), - macro_arg, - macro_def, - exp_map, - } -} - impl HygieneFrame { - pub(crate) fn new(db: &dyn ExpandDatabase, file_id: HirFileId) -> HygieneFrame { - let (info, krate, local_inner) = match file_id.macro_file() { - None => (None, None, false), - Some(macro_file) => { - let loc = db.lookup_intern_macro_call(macro_file.macro_call_id); - let info = Some((make_hygiene_info(db, macro_file, &loc), loc.kind.file_id())); - match loc.def.kind { - MacroDefKind::Declarative(_) => { - (info, Some(loc.def.krate), loc.def.local_inner) - } - MacroDefKind::BuiltIn(..) => (info, Some(loc.def.krate), false), - MacroDefKind::BuiltInAttr(..) => (info, None, false), - MacroDefKind::BuiltInDerive(..) => (info, None, false), - MacroDefKind::BuiltInEager(..) => (info, None, false), - MacroDefKind::ProcMacro(..) => (info, None, false), - } - } - }; - - let Some((info, calling_file)) = info else { - return HygieneFrame { - expansion: None, - local_inner, - krate, - call_site: None, - def_site: None, - }; - }; - - let def_site = info.attr_input_or_mac_def_start.map(|it| db.hygiene_frame(it.file_id)); - let call_site = Some(db.hygiene_frame(calling_file)); - - HygieneFrame { expansion: Some(info), local_inner, krate, call_site, def_site } + pub(crate) fn new(_: &dyn ExpandDatabase, _: HirFileId) -> HygieneFrame { + HygieneFrame {} } } diff --git a/crates/hir-expand/src/lib.rs b/crates/hir-expand/src/lib.rs index bd5796e000ac7..ae07cf4b151bd 100644 --- a/crates/hir-expand/src/lib.rs +++ b/crates/hir-expand/src/lib.rs @@ -51,7 +51,7 @@ pub type DeclarativeMacro = ::mbe::DeclarativeMacro; pub mod tt { pub use base_db::span::SpanData; - pub use tt::{DelimiterKind, Spacing, Span}; + pub use tt::{DelimiterKind, Spacing, Span, SpanAnchor}; pub type Delimiter = ::tt::Delimiter; pub type Subtree = ::tt::Subtree; @@ -97,44 +97,6 @@ impl fmt::Display for ExpandError { } } -/// `MacroCallId` identifies a particular macro invocation, like -/// `println!("Hello, {}", world)`. 
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] -pub struct SyntaxContextId(base_db::salsa::InternId); -base_db::impl_intern_key!(SyntaxContextId); - -#[derive(Debug, Clone, Hash, PartialEq, Eq)] -pub struct SyntaxContext { - outer_expn: HirFileId, - outer_transparency: Transparency, - parent: SyntaxContextId, - /// This context, but with all transparent and semi-transparent expansions filtered away. - opaque: SyntaxContextId, - /// This context, but with all transparent expansions filtered away. - opaque_and_semitransparent: SyntaxContextId, - /// Name of the crate to which `$crate` with this context would resolve. - dollar_crate_name: name::Name, -} - -/// A property of a macro expansion that determines how identifiers -/// produced by that expansion are resolved. -#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Hash, Debug)] -pub enum Transparency { - /// Identifier produced by a transparent expansion is always resolved at call-site. - /// Call-site spans in procedural macros, hygiene opt-out in `macro` should use this. - Transparent, - /// Identifier produced by a semi-transparent expansion may be resolved - /// either at call-site or at definition-site. - /// If it's a local variable, label or `$crate` then it's resolved at def-site. - /// Otherwise it's resolved at call-site. - /// `macro_rules` macros behave like this, built-in macros currently behave like this too, - /// but that's an implementation detail. - SemiTransparent, - /// Identifier produced by an opaque expansion is always resolved at definition-site. - /// Def-site spans in procedural macros, identifiers from `macro` by default use this. - Opaque, -} - #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct MacroCallLoc { pub def: MacroDefId, diff --git a/crates/hir-expand/src/quote.rs b/crates/hir-expand/src/quote.rs index 9dd4965c15066..44f20cbd92d25 100644 --- a/crates/hir-expand/src/quote.rs +++ b/crates/hir-expand/src/quote.rs @@ -247,8 +247,8 @@ mod tests { assert_eq!(quoted.to_string(), "hello"); let t = format!("{quoted:?}"); expect![[r#" - SUBTREE $$ SpanData { range: 0..0, anchor: SpanAnchor { file_id: FileId(0), ast_id: Idx::>(0) } } SpanData { range: 0..0, anchor: SpanAnchor { file_id: FileId(0), ast_id: Idx::>(0) } } - IDENT hello SpanData { range: 0..0, anchor: SpanAnchor { file_id: FileId(0), ast_id: Idx::>(0) } }"#]].assert_eq(&t); + SUBTREE $$ SpanData { range: 0..0, anchor: SpanAnchor(FileId(0), 0), ctx: SyntaxContextId(0) } SpanData { range: 0..0, anchor: SpanAnchor(FileId(0), 0), ctx: SyntaxContextId(0) } + IDENT hello SpanData { range: 0..0, anchor: SpanAnchor(FileId(0), 0), ctx: SyntaxContextId(0) }"#]].assert_eq(&t); } #[test] diff --git a/crates/mbe/src/benchmark.rs b/crates/mbe/src/benchmark.rs index 4f60e90773ee3..2ead71645639a 100644 --- a/crates/mbe/src/benchmark.rs +++ b/crates/mbe/src/benchmark.rs @@ -6,19 +6,27 @@ use syntax::{ AstNode, SmolStr, }; use test_utils::{bench, bench_fixture, skip_slow_tests}; -use tt::{Span, SpanData}; +use tt::{Span, SpanAnchor, SyntaxContext}; use crate::{ parser::{MetaVarKind, Op, RepeatKind, Separator}, syntax_node_to_token_tree, DeclarativeMacro, }; +type SpanData = tt::SpanData; + #[derive(PartialEq, Eq, Clone, Copy, Debug)] struct DummyFile; -impl Span for DummyFile { +impl SpanAnchor for DummyFile { const DUMMY: Self = DummyFile; } +#[derive(PartialEq, Eq, Clone, Copy, Debug)] +struct DummyCtx; +impl SyntaxContext for DummyCtx { + const DUMMY: Self = DummyCtx; +} + #[test] fn benchmark_parse_macro_rules() { if skip_slow_tests() { @@ -54,14 +62,14 
@@ fn benchmark_expand_macro_rules() { assert_eq!(hash, 69413); } -fn macro_rules_fixtures() -> FxHashMap>> { +fn macro_rules_fixtures() -> FxHashMap> { macro_rules_fixtures_tt() .into_iter() .map(|(id, tt)| (id, DeclarativeMacro::parse_macro_rules(&tt, true))) .collect() } -fn macro_rules_fixtures_tt() -> FxHashMap>> { +fn macro_rules_fixtures_tt() -> FxHashMap> { let fixture = bench_fixture::numerous_macro_rules(); let source_file = ast::SourceFile::parse(&fixture).ok().unwrap(); @@ -84,8 +92,8 @@ fn macro_rules_fixtures_tt() -> FxHashMap>>, -) -> Vec<(String, tt::Subtree>)> { + rules: &FxHashMap>, +) -> Vec<(String, tt::Subtree)> { let mut seed = 123456789; let mut res = Vec::new(); @@ -130,11 +138,7 @@ fn invocation_fixtures( } return res; - fn collect_from_op( - op: &Op>, - parent: &mut tt::Subtree>, - seed: &mut usize, - ) { + fn collect_from_op(op: &Op, parent: &mut tt::Subtree, seed: &mut usize) { return match op { Op::Var { kind, .. } => match kind.as_ref() { Some(MetaVarKind::Ident) => parent.token_trees.push(make_ident("foo")), @@ -220,20 +224,20 @@ fn invocation_fixtures( *seed = usize::wrapping_add(usize::wrapping_mul(*seed, a), c); *seed } - fn make_ident(ident: &str) -> tt::TokenTree> { + fn make_ident(ident: &str) -> tt::TokenTree { tt::Leaf::Ident(tt::Ident { span: SpanData::DUMMY, text: SmolStr::new(ident) }).into() } - fn make_punct(char: char) -> tt::TokenTree> { + fn make_punct(char: char) -> tt::TokenTree { tt::Leaf::Punct(tt::Punct { span: SpanData::DUMMY, char, spacing: tt::Spacing::Alone }) .into() } - fn make_literal(lit: &str) -> tt::TokenTree> { + fn make_literal(lit: &str) -> tt::TokenTree { tt::Leaf::Literal(tt::Literal { span: SpanData::DUMMY, text: SmolStr::new(lit) }).into() } fn make_subtree( kind: tt::DelimiterKind, - token_trees: Option>>>, - ) -> tt::TokenTree> { + token_trees: Option>>, + ) -> tt::TokenTree { tt::Subtree { delimiter: tt::Delimiter { open: SpanData::DUMMY, close: SpanData::DUMMY, kind }, token_trees: token_trees.unwrap_or_default(), diff --git a/crates/mbe/src/syntax_bridge.rs b/crates/mbe/src/syntax_bridge.rs index c8c2e5dcd55a0..ab272862cdb50 100644 --- a/crates/mbe/src/syntax_bridge.rs +++ b/crates/mbe/src/syntax_bridge.rs @@ -9,7 +9,7 @@ use syntax::{ }; use tt::{ buffer::{Cursor, TokenBuffer}, - Span, SpanData, + Span, SpanData, SyntaxContext, }; use crate::{to_parser_input::to_parser_input, tt_iter::TtIter, TokenMap}; @@ -23,33 +23,37 @@ mod tests; /// to relative spans, relative to the passed anchor. /// `map` is used to resolve the converted spans accordingly. 
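/// A usage sketch, mirroring the dummy anchor/context types this patch adds
/// to the `cfg` and `mbe` tests (`DummyFile` and `DummyCtx` are test-only
/// stand-ins for a real span anchor and syntax context):
///
///     let source_file = ast::SourceFile::parse("#[cfg(test)]").ok().unwrap();
///     let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
///     let subtree = syntax_node_to_token_tree::<_, DummyCtx>(
///         tt.syntax(),
///         DummyFile,           // anchor that all emitted spans are relative to
///         0.into(),            // offset of the node within that anchor
///         &Default::default(), // empty map, so every token gets a dummy span
///     );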
/// TODO: Flesh out the doc comment more thoroughly -pub fn syntax_node_to_token_tree( +pub fn syntax_node_to_token_tree( node: &SyntaxNode, - anchor: SpanAnchor, + anchor: Anchor, anchor_offset: TextSize, - map: &TokenMap>, -) -> tt::Subtree> + map: &TokenMap>, +) -> tt::Subtree> where - SpanData: Span, + SpanData: Span, + Anchor: Copy, + Ctx: SyntaxContext, { assert!(anchor_offset <= node.text_range().start()); - let mut c = Converter::new(node, anchor_offset, anchor, vec![], map); - convert_tokens(&mut c) + let mut c = Converter::new(node, anchor_offset, vec![], map); + convert_tokens(&mut c, anchor) } -pub fn syntax_node_to_token_tree_censored( +pub fn syntax_node_to_token_tree_censored( node: &SyntaxNode, - anchor: SpanAnchor, + anchor: Anchor, anchor_offset: TextSize, - map: &TokenMap>, + map: &TokenMap>, censored: Vec, -) -> tt::Subtree> +) -> tt::Subtree> where - SpanData: Span, + SpanData: Span, + Anchor: Copy, + Ctx: SyntaxContext, { assert!(anchor_offset <= node.text_range().start()); - let mut c = Converter::new(node, anchor_offset, anchor, censored, map); - convert_tokens(&mut c) + let mut c = Converter::new(node, anchor_offset, censored, map); + convert_tokens(&mut c, anchor) } // The following items are what `rustc` macro can be parsed into : @@ -64,12 +68,14 @@ where // * AssocItems(SmallVec<[ast::AssocItem; 1]>) // * ForeignItems(SmallVec<[ast::ForeignItem; 1]> -pub fn token_tree_to_syntax_node( - tt: &tt::Subtree>, +pub fn token_tree_to_syntax_node( + tt: &tt::Subtree>, entry_point: parser::TopEntryPoint, -) -> (Parse, TokenMap>) +) -> (Parse, TokenMap>) where - SpanData: Span, + SpanData: Span, + Anchor: Copy, + Ctx: SyntaxContext, { let buffer = match tt { tt::Subtree { @@ -97,36 +103,42 @@ where tree_sink.finish() } -pub fn map_from_syntax_node( +pub fn map_from_syntax_node( node: &SyntaxNode, - anchor: SpanAnchor, + anchor: Anchor, anchor_offset: TextSize, -) -> TokenMap> +) -> TokenMap> where - SpanAnchor: Copy, - SpanData: Span, + Anchor: Copy, + SpanData: Span, + Ctx: SyntaxContext, { let mut map = TokenMap::default(); node.descendants_with_tokens().filter_map(NodeOrToken::into_token).for_each(|t| { - map.insert(t.text_range(), SpanData { range: t.text_range() - anchor_offset, anchor }); + map.insert( + t.text_range(), + SpanData { range: t.text_range() - anchor_offset, anchor, ctx: Ctx::DUMMY }, + ); }); map } /// Convert a string to a `TokenTree` -pub fn parse_to_token_tree( +pub fn parse_to_token_tree( text: &str, - file_id: SpanAnchor, -) -> Option>> + anchor: Anchor, +) -> Option>> where - SpanData: Span, + SpanData: Span, + Anchor: Copy, + Ctx: SyntaxContext, { let lexed = parser::LexedStr::new(text); if lexed.errors().next().is_some() { return None; } - let mut conv = RawConverter { lexed, pos: 0, _offset: TextSize::default(), file_id }; - Some(convert_tokens(&mut conv)) + let mut conv = RawConverter { lexed, pos: 0, _offset: TextSize::default() }; + Some(convert_tokens(&mut conv, anchor)) } /// Split token tree with separate expr: $($e:expr)SEP* @@ -166,134 +178,141 @@ pub fn parse_exprs_with_sep(tt: &tt::Subtree, sep: char) -> Vec>( +fn convert_tokens( conv: &mut C, -) -> tt::Subtree> + anchor: Anchor, +) -> tt::Subtree> where - SpanData: Span, - SpanAnchor: Copy, + C: TokenConverter, + Ctx: SyntaxContext, + SpanData: Span, + Anchor: Copy, { - let entry = tt::Subtree { delimiter: tt::Delimiter::unspecified(), token_trees: vec![] }; + let entry = tt::Subtree { delimiter: tt::Delimiter::UNSPECIFIED, token_trees: vec![] }; let mut stack = 
NonEmptyVec::new(entry); - let anchor = conv.anchor(); - loop { - let subtree = stack.last_mut(); - let result = &mut subtree.token_trees; - let Some((token, rel_range, abs_range)) = conv.bump() else { break }; + while let Some((token, rel_range, abs_range)) = conv.bump() { + let tt::Subtree { delimiter, token_trees: result } = stack.last_mut(); + let mk_dummy_span = || SpanData { range: rel_range, anchor, ctx: Ctx::DUMMY }; let kind = token.kind(conv); - if kind == COMMENT { - if let Some(tokens) = conv.convert_doc_comment( - &token, - conv.span_for(abs_range).unwrap_or(SpanData { range: rel_range, anchor }), - ) { - result.extend(tokens); + + let tt = match kind { + // Desugar doc comments into doc attributes + COMMENT => { + let span = conv.span_for(abs_range).unwrap_or_else(mk_dummy_span); + if let Some(tokens) = conv.convert_doc_comment(&token, span) { + result.extend(tokens); + } + continue; } - continue; - } - let tt = if kind.is_punct() && kind != UNDERSCORE { - let expected = match subtree.delimiter.kind { - tt::DelimiterKind::Parenthesis => Some(T![')']), - tt::DelimiterKind::Brace => Some(T!['}']), - tt::DelimiterKind::Bracket => Some(T![']']), - tt::DelimiterKind::Invisible => None, - }; + _ if kind.is_punct() && kind != UNDERSCORE => { + let expected = match delimiter.kind { + tt::DelimiterKind::Parenthesis => Some(T![')']), + tt::DelimiterKind::Brace => Some(T!['}']), + tt::DelimiterKind::Bracket => Some(T![']']), + tt::DelimiterKind::Invisible => None, + }; - if let Some(expected) = expected { - if kind == expected { + // Current token is a closing delimiter that we expect, fix up the closing span + // and end the subtree here + if matches!(expected, Some(expected) if expected == kind) { if let Some(mut subtree) = stack.pop() { - subtree.delimiter.close = conv - .span_for(abs_range) - .unwrap_or(SpanData { range: rel_range, anchor }); + subtree.delimiter.close = + conv.span_for(abs_range).unwrap_or_else(mk_dummy_span); stack.last_mut().token_trees.push(subtree.into()); } continue; } - } - - let delim = match kind { - T!['('] => Some(tt::DelimiterKind::Parenthesis), - T!['{'] => Some(tt::DelimiterKind::Brace), - T!['['] => Some(tt::DelimiterKind::Bracket), - _ => None, - }; - if let Some(kind) = delim { - let subtree = tt::Subtree { - delimiter: tt::Delimiter { - // FIXME: Open and close spans - open: conv - .span_for(abs_range) - .unwrap_or(SpanData { range: rel_range, anchor }), - close: Span::DUMMY, - kind, - }, - token_trees: vec![], + let delim = match kind { + T!['('] => Some(tt::DelimiterKind::Parenthesis), + T!['{'] => Some(tt::DelimiterKind::Brace), + T!['['] => Some(tt::DelimiterKind::Bracket), + _ => None, }; - stack.push(subtree); - continue; - } - let spacing = match conv.peek().map(|next| next.kind(conv)) { - Some(kind) if is_single_token_op(kind) => tt::Spacing::Joint, - _ => tt::Spacing::Alone, - }; - let char = match token.to_char(conv) { - Some(c) => c, - None => { - panic!("Token from lexer must be single char: token = {token:#?}"); + // Start a new subtree + if let Some(kind) = delim { + stack.push(tt::Subtree { + delimiter: tt::Delimiter { + open: conv.span_for(abs_range).unwrap_or_else(mk_dummy_span), + // will be overwritten on subtree close above + close: mk_dummy_span(), + kind, + }, + token_trees: vec![], + }); + continue; } - }; - tt::Leaf::from(tt::Punct { - char, - spacing, - span: conv.span_for(abs_range).unwrap_or(SpanData { range: rel_range, anchor }), - }) - .into() - } else { - macro_rules! 
make_leaf { - ($i:ident) => { - tt::$i { - span: conv - .span_for(abs_range) - .unwrap_or(SpanData { range: rel_range, anchor }), - text: token.to_text(conv), - } - .into() + + let spacing = match conv.peek().map(|next| next.kind(conv)) { + Some(kind) if is_single_token_op(kind) => tt::Spacing::Joint, + _ => tt::Spacing::Alone, }; + let Some(char) = token.to_char(conv) else { + panic!("Token from lexer must be single char: token = {token:#?}") + }; + tt::Leaf::from(tt::Punct { + char, + spacing, + span: conv.span_for(abs_range).unwrap_or_else(mk_dummy_span), + }) + .into() } - let leaf: tt::Leaf<_> = match kind { - T![true] | T![false] => make_leaf!(Ident), - IDENT => make_leaf!(Ident), - UNDERSCORE => make_leaf!(Ident), - k if k.is_keyword() => make_leaf!(Ident), - k if k.is_literal() => make_leaf!(Literal), - // FIXME: Check whether span splitting works as intended - LIFETIME_IDENT => { - let char_unit = TextSize::of('\''); - let r = TextRange::at(rel_range.start(), char_unit); - let apostrophe = tt::Leaf::from(tt::Punct { - char: '\'', - spacing: tt::Spacing::Joint, - span: conv.span_for(abs_range).unwrap_or(SpanData { range: r, anchor }), - }); - result.push(apostrophe.into()); - - let r = - TextRange::at(rel_range.start() + char_unit, rel_range.len() - char_unit); - let ident = tt::Leaf::from(tt::Ident { - text: SmolStr::new(&token.to_text(conv)[1..]), - span: conv.span_for(abs_range).unwrap_or(SpanData { range: r, anchor }), - }); - result.push(ident.into()); - continue; + _ => { + macro_rules! make_leaf { + ($i:ident) => { + tt::$i { + span: conv.span_for(abs_range).unwrap_or_else(mk_dummy_span), + text: token.to_text(conv), + } + .into() + }; } - _ => continue, - }; + let leaf: tt::Leaf<_> = match kind { + T![true] | T![false] => make_leaf!(Ident), + IDENT => make_leaf!(Ident), + UNDERSCORE => make_leaf!(Ident), + k if k.is_keyword() => make_leaf!(Ident), + k if k.is_literal() => make_leaf!(Literal), + // FIXME: Check whether span splitting works as intended + LIFETIME_IDENT => { + let char_unit = TextSize::of('\''); + let r = TextRange::at(rel_range.start(), char_unit); + let apostrophe = tt::Leaf::from(tt::Punct { + char: '\'', + spacing: tt::Spacing::Joint, + span: conv.span_for(abs_range).unwrap_or(SpanData { + range: r, + anchor, + ctx: Ctx::DUMMY, + }), + }); + result.push(apostrophe.into()); + + let r = TextRange::at( + rel_range.start() + char_unit, + rel_range.len() - char_unit, + ); + let ident = tt::Leaf::from(tt::Ident { + text: SmolStr::new(&token.to_text(conv)[1..]), + span: conv.span_for(abs_range).unwrap_or(SpanData { + range: r, + anchor, + ctx: Ctx::DUMMY, + }), + }); + result.push(ident.into()); + continue; + } + _ => continue, + }; - leaf.into() + leaf.into() + } }; + result.push(tt); } @@ -417,11 +436,10 @@ fn convert_doc_comment( } /// A raw token (straight from lexer) converter -struct RawConverter<'a, SpanAnchor> { +struct RawConverter<'a> { lexed: parser::LexedStr<'a>, pos: usize, _offset: TextSize, - file_id: SpanAnchor, } trait SrcToken: std::fmt::Debug { @@ -432,48 +450,47 @@ trait SrcToken: std::fmt::Debug { fn to_text(&self, ctx: &Ctx) -> SmolStr; } -trait TokenConverter: Sized { +trait TokenConverter: Sized { type Token: SrcToken; fn convert_doc_comment( &self, token: &Self::Token, - span: SpanData, - ) -> Option>>>; + span: SpanData, + ) -> Option>>>; fn bump(&mut self) -> Option<(Self::Token, TextRange, TextRange)>; fn peek(&self) -> Option; - fn anchor(&self) -> SpanAnchor; - fn span_for(&self, range: TextRange) -> Option>; + fn span_for(&self, 
range: TextRange) -> Option>; } -impl SrcToken> for usize { - fn kind(&self, ctx: &RawConverter<'_, SpanAnchor>) -> SyntaxKind { +impl SrcToken> for usize { + fn kind(&self, ctx: &RawConverter<'_>) -> SyntaxKind { ctx.lexed.kind(*self) } - fn to_char(&self, ctx: &RawConverter<'_, SpanAnchor>) -> Option { + fn to_char(&self, ctx: &RawConverter<'_>) -> Option { ctx.lexed.text(*self).chars().next() } - fn to_text(&self, ctx: &RawConverter<'_, SpanAnchor>) -> SmolStr { + fn to_text(&self, ctx: &RawConverter<'_>) -> SmolStr { ctx.lexed.text(*self).into() } } -impl TokenConverter for RawConverter<'_, SpanAnchor> +impl TokenConverter for RawConverter<'_> where - SpanData: Span, + SpanData: Span, { type Token = usize; fn convert_doc_comment( &self, &token: &usize, - span: SpanData, - ) -> Option>>> { + span: SpanData, + ) -> Option>>> { let text = self.lexed.text(token); convert_doc_comment(&doc_comment(text), span) } @@ -497,34 +514,29 @@ where Some(self.pos) } - fn anchor(&self) -> SpanAnchor { - self.file_id - } - fn span_for(&self, _: TextRange) -> Option> { + fn span_for(&self, _: TextRange) -> Option> { None } } -struct Converter<'a, SpanAnchor> { +struct Converter<'a, Anchor, Ctx> { current: Option, preorder: PreorderWithTokens, range: TextRange, punct_offset: Option<(SyntaxToken, TextSize)>, /// Used to make the emitted text ranges in the spans relative to the span anchor. offset: TextSize, - file_id: SpanAnchor, - map: &'a TokenMap>, + map: &'a TokenMap>, censored: Vec, } -impl<'a, SpanAnchor> Converter<'a, SpanAnchor> { +impl<'a, Anchor, Ctx> Converter<'a, Anchor, Ctx> { fn new( node: &SyntaxNode, anchor_offset: TextSize, - file_id: SpanAnchor, censored: Vec, - map: &'a TokenMap>, - ) -> Converter<'a, SpanAnchor> { + map: &'a TokenMap>, + ) -> Self { let range = node.text_range(); let mut preorder = node.preorder_with_tokens(); let first = Self::next_token(&mut preorder, &censored); @@ -534,7 +546,6 @@ impl<'a, SpanAnchor> Converter<'a, SpanAnchor> { range, punct_offset: None, offset: anchor_offset, - file_id, censored, map, } @@ -569,36 +580,36 @@ impl SynToken { } } -impl SrcToken> for SynToken { - fn kind(&self, ctx: &Converter<'_, SpanAnchor>) -> SyntaxKind { +impl SrcToken> for SynToken { + fn kind(&self, ctx: &Converter<'_, Anchor, Ctx>) -> SyntaxKind { match self { SynToken::Ordinary(token) => token.kind(), SynToken::Punct(..) 
=> SyntaxKind::from_char(self.to_char(ctx).unwrap()).unwrap(), } } - fn to_char(&self, _ctx: &Converter<'_, SpanAnchor>) -> Option { + fn to_char(&self, _ctx: &Converter<'_, Anchor, Ctx>) -> Option { match self { SynToken::Ordinary(_) => None, SynToken::Punct(it, i) => it.text().chars().nth(*i), } } - fn to_text(&self, _ctx: &Converter<'_, SpanAnchor>) -> SmolStr { + fn to_text(&self, _ctx: &Converter<'_, Anchor, Ctx>) -> SmolStr { match self { SynToken::Ordinary(token) | SynToken::Punct(token, _) => token.text().into(), } } } -impl TokenConverter for Converter<'_, SpanAnchor> +impl TokenConverter for Converter<'_, Anchor, Ctx> where - SpanData: Span, + SpanData: Span, { type Token = SynToken; fn convert_doc_comment( &self, token: &Self::Token, - span: SpanData, - ) -> Option>>> { + span: SpanData, + ) -> Option>>> { convert_doc_comment(token.token(), span) } @@ -657,27 +668,24 @@ where Some(token) } - fn anchor(&self) -> SpanAnchor { - self.file_id - } - fn span_for(&self, range: TextRange) -> Option> { + fn span_for(&self, range: TextRange) -> Option> { self.map.span_for_range(range) } } -struct TtTreeSink<'a, SpanAnchor> { +struct TtTreeSink<'a, Anchor, Ctx> { buf: String, - cursor: Cursor<'a, SpanData>, + cursor: Cursor<'a, SpanData>, text_pos: TextSize, inner: SyntaxTreeBuilder, - token_map: TokenMap>, + token_map: TokenMap>, } -impl<'a, SpanAnchor> TtTreeSink<'a, SpanAnchor> +impl<'a, Anchor, Ctx> TtTreeSink<'a, Anchor, Ctx> where - SpanData: Span, + SpanData: Span, { - fn new(cursor: Cursor<'a, SpanData>) -> Self { + fn new(cursor: Cursor<'a, SpanData>) -> Self { TtTreeSink { buf: String::new(), cursor, @@ -687,7 +695,7 @@ where } } - fn finish(mut self) -> (Parse, TokenMap>) { + fn finish(mut self) -> (Parse, TokenMap>) { self.token_map.shrink_to_fit(); (self.inner.finish(), self.token_map) } @@ -705,9 +713,9 @@ fn delim_to_str(d: tt::DelimiterKind, closing: bool) -> Option<&'static str> { Some(&texts[idx..texts.len() - (1 - idx)]) } -impl TtTreeSink<'_, SpanAnchor> +impl TtTreeSink<'_, Anchor, Ctx> where - SpanData: Span, + SpanData: Span, { /// Parses a float literal as if it was a one to two name ref nodes with a dot inbetween. /// This occurs when a float literal is used as a field access. 
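    /// For example (illustrative, not taken from this patch):
    ///
    ///     let nested = ((1, 2), 3);
    ///     let one = nested.0.0; // `0.0` lexes as one FLOAT token, but the
    ///                           // tree needs NAME_REF `0`, DOT, NAME_REF `0`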
diff --git a/crates/mbe/src/syntax_bridge/tests.rs b/crates/mbe/src/syntax_bridge/tests.rs index 32dfb4d7e0b39..0275e5397c9b4 100644 --- a/crates/mbe/src/syntax_bridge/tests.rs +++ b/crates/mbe/src/syntax_bridge/tests.rs @@ -4,20 +4,26 @@ use syntax::{ast, AstNode}; use test_utils::extract_annotations; use tt::{ buffer::{TokenBuffer, TokenTreeRef}, - Leaf, Punct, Spacing, Span, + Leaf, Punct, Spacing, SpanAnchor, SyntaxContext, }; -use crate::syntax_bridge::SpanData; - use super::syntax_node_to_token_tree; fn check_punct_spacing(fixture: &str) { + type SpanData = tt::SpanData; + #[derive(PartialEq, Eq, Clone, Copy, Debug)] struct DummyFile; - impl Span for DummyFile { + impl SpanAnchor for DummyFile { const DUMMY: Self = DummyFile; } + #[derive(PartialEq, Eq, Clone, Copy, Debug)] + struct DummyCtx; + impl SyntaxContext for DummyCtx { + const DUMMY: Self = DummyCtx; + } + let source_file = ast::SourceFile::parse(fixture).ok().unwrap(); let subtree = syntax_node_to_token_tree(source_file.syntax(), DummyFile, 0.into(), &Default::default()); diff --git a/crates/proc-macro-api/src/msg.rs b/crates/proc-macro-api/src/msg.rs index f0719777ab4df..6a7329e322f31 100644 --- a/crates/proc-macro-api/src/msg.rs +++ b/crates/proc-macro-api/src/msg.rs @@ -120,11 +120,13 @@ fn write_json(out: &mut impl Write, msg: &str) -> io::Result<()> { Ok(()) } +/* + #[cfg(test)] mod tests { use tt::{ - Delimiter, DelimiterKind, Ident, Leaf, Literal, Punct, Spacing, Span, Subtree, TokenId, - TokenTree, + Delimiter, DelimiterKind, Ident, Leaf, Literal, Punct, Spacing, SpanAnchor, Subtree, + TokenId, TokenTree, }; use super::*; @@ -176,3 +178,4 @@ mod tests { assert_eq!(tt, back.macro_body.to_subtree(CURRENT_API_VERSION)); } } +*/ diff --git a/crates/proc-macro-api/src/msg/flat.rs b/crates/proc-macro-api/src/msg/flat.rs index bfb3213a25d96..22d9f3952cc28 100644 --- a/crates/proc-macro-api/src/msg/flat.rs +++ b/crates/proc-macro-api/src/msg/flat.rs @@ -39,7 +39,7 @@ use std::collections::{HashMap, VecDeque}; use serde::{Deserialize, Serialize}; use text_size::TextRange; -use tt::Span; +use tt::{Span, SyntaxContext}; use crate::msg::{ENCODE_CLOSE_SPAN_VERSION, VARIABLE_SIZED_SPANS}; @@ -47,25 +47,30 @@ pub trait SerializableSpan: Span { fn into_u32(self) -> [u32; L]; fn from_u32(input: [u32; L]) -> Self; } -impl SerializableSpan<1> for tt::TokenId { - fn into_u32(self) -> [u32; 1] { - [self.0] - } - fn from_u32([input]: [u32; 1]) -> Self { - tt::TokenId(input) - } -} - -impl SerializableSpan<3> for tt::SpanData +// impl SerializableSpan<1> for tt::TokenId { +// fn into_u32(self) -> [u32; 1] { +// [self.0] +// } +// fn from_u32([input]: [u32; 1]) -> Self { +// tt::TokenId(input) +// } +// } + +impl SerializableSpan<3> for tt::SpanData where - FileId: From + Into, + Anchor: From + Into, Self: Span, + Ctx: SyntaxContext, { fn into_u32(self) -> [u32; 3] { [self.anchor.into(), self.range.start().into(), self.range.end().into()] } fn from_u32([file_id, start, end]: [u32; 3]) -> Self { - tt::SpanData { anchor: file_id.into(), range: TextRange::new(start.into(), end.into()) } + tt::SpanData { + anchor: file_id.into(), + range: TextRange::new(start.into(), end.into()), + ctx: Ctx::DUMMY, + } } } diff --git a/crates/rust-analyzer/src/cargo_target_spec.rs b/crates/rust-analyzer/src/cargo_target_spec.rs index b6dcc26de6210..4742dc1dfa79f 100644 --- a/crates/rust-analyzer/src/cargo_target_spec.rs +++ b/crates/rust-analyzer/src/cargo_target_spec.rs @@ -210,7 +210,7 @@ mod tests { use cfg::CfgExpr; use hir::HirFileId; - use 
ide_db::base_db::span::{SpanAnchor, ROOT_ERASED_FILE_AST_ID}; + use ide_db::base_db::span::{SpanAnchor, SyntaxContextId, ROOT_ERASED_FILE_AST_ID}; use mbe::syntax_node_to_token_tree; use syntax::{ ast::{self, AstNode}, @@ -221,7 +221,7 @@ mod tests { let cfg_expr = { let source_file = ast::SourceFile::parse(cfg).ok().unwrap(); let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap(); - let tt = syntax_node_to_token_tree( + let tt = syntax_node_to_token_tree::<_, SyntaxContextId>( tt.syntax(), SpanAnchor { file_id: HirFileId::from(0), ast_id: ROOT_ERASED_FILE_AST_ID }, TextSize::new(0), diff --git a/crates/tt/src/lib.rs b/crates/tt/src/lib.rs index 89cb12d2c2670..a384af2a9a33d 100644 --- a/crates/tt/src/lib.rs +++ b/crates/tt/src/lib.rs @@ -32,27 +32,37 @@ impl TokenId { Self::UNSPECIFIED } } -impl Span for TokenId { - const DUMMY: Self = TokenId(!0); -} #[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)] -pub struct SpanData { +pub struct SpanData { /// The text range of this span, relative to the anchor. + /// We need the anchor for incrementality, as storing absolute ranges will require + /// recomputation on every change in a file at all times. pub range: TextRange, pub anchor: Anchor, + /// The syntax context of the span. + pub ctx: Ctx, } -impl Span for SpanData { - const DUMMY: Self = - SpanData { range: TextRange::empty(TextSize::new(0)), anchor: Anchor::DUMMY }; +impl Span for SpanData { + const DUMMY: Self = SpanData { + range: TextRange::empty(TextSize::new(0)), + anchor: Anchor::DUMMY, + ctx: Ctx::DUMMY, + }; +} + +pub trait SpanAnchor: std::fmt::Debug + Copy + Sized + Eq { + const DUMMY: Self; } pub trait Span: std::fmt::Debug + Copy + Sized + Eq { const DUMMY: Self; } -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] -pub struct SyntaxContext(pub u32); + +pub trait SyntaxContext: std::fmt::Debug + Copy + Sized + Eq { + const DUMMY: Self; +} #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub enum TokenTree { From 05f375eae29927565da546c48fc64b843a90197e Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Fri, 17 Nov 2023 19:07:31 +0100 Subject: [PATCH 06/80] hygiene 2.0 --- Cargo.toml | 2 +- crates/base-db/src/span.rs | 11 +- crates/hir-def/src/data.rs | 3 +- crates/hir-def/src/expander.rs | 17 +- crates/hir-def/src/find_path.rs | 4 +- crates/hir-def/src/item_tree.rs | 10 +- crates/hir-def/src/item_tree/lower.rs | 46 ++-- crates/hir-def/src/item_tree/pretty.rs | 2 +- crates/hir-def/src/lib.rs | 23 +- crates/hir-def/src/lower.rs | 22 +- .../hir-def/src/macro_expansion_tests/mbe.rs | 13 +- .../macro_expansion_tests/mbe/metavar_expr.rs | 4 +- .../macro_expansion_tests/mbe/regression.rs | 4 +- .../mbe/tt_conversion.rs | 6 +- .../hir-def/src/macro_expansion_tests/mod.rs | 5 +- crates/hir-def/src/nameres/collector.rs | 69 +++--- crates/hir-def/src/path/lower.rs | 75 +++--- crates/hir-def/src/test_db.rs | 3 +- crates/hir-def/src/visibility.rs | 8 +- crates/hir-expand/src/attrs.rs | 38 ++- crates/hir-expand/src/db.rs | 179 +++++++++----- crates/hir-expand/src/eager.rs | 26 +- crates/hir-expand/src/hygiene.rs | 198 ++++++++++----- crates/hir-expand/src/lib.rs | 9 +- crates/hir-expand/src/mod_path.rs | 93 +++++-- crates/hir-expand/src/name.rs | 1 + crates/hir-ty/src/display.rs | 6 +- crates/hir-ty/src/test_db.rs | 3 +- crates/hir/src/attrs.rs | 4 +- crates/hir/src/db.rs | 2 +- crates/hir/src/lib.rs | 5 +- crates/hir/src/semantics.rs | 4 +- crates/hir/src/source_analyzer.rs | 4 +- crates/ide-db/src/apply_change.rs | 1 - crates/ide-db/src/lib.rs | 1 - 
crates/mbe/src/benchmark.rs | 4 +- crates/mbe/src/expander.rs | 9 +- crates/mbe/src/expander/matcher.rs | 4 +- crates/mbe/src/expander/transcriber.rs | 233 +++++++++++------- crates/mbe/src/lib.rs | 13 +- crates/mbe/src/syntax_bridge.rs | 6 +- crates/mbe/src/token_map.rs | 18 +- crates/tt/src/lib.rs | 12 +- 43 files changed, 750 insertions(+), 450 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index 73bb9c84d2c95..bf98d7ecf9c14 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -12,7 +12,7 @@ authors = ["rust-analyzer team"] [profile.dev] # Disabling debug info speeds up builds a bunch, # and we don't rely on it for debugging that much. -debug = 0 +debug = 1 [profile.dev.package] # These speed up local tests. diff --git a/crates/base-db/src/span.rs b/crates/base-db/src/span.rs index acc1e5243f731..f430a36ddfb49 100644 --- a/crates/base-db/src/span.rs +++ b/crates/base-db/src/span.rs @@ -17,9 +17,18 @@ pub struct SyntaxContextId(InternId); crate::impl_intern_key!(SyntaxContextId); impl SyntaxContext for SyntaxContextId { + const DUMMY: Self = Self::ROOT; + // veykril(HACK): salsa doesn't allow us fetching the id of the current input to be allocated so + // we need a special value that behaves as the current context. +} +// inherent trait impls please tyvm +impl SyntaxContextId { + // FIXME: This is very much UB, salsa exposes no way to create an InternId in a const context + // currently (which kind of makes sense but we need it here!) + pub const ROOT: Self = SyntaxContextId(unsafe { core::mem::transmute(1) }); // FIXME: This is very much UB, salsa exposes no way to create an InternId in a const context // currently (which kind of makes sense but we need it here!) - const DUMMY: Self = SyntaxContextId(unsafe { core::mem::transmute(1) }); + pub const SELF_REF: Self = SyntaxContextId(unsafe { core::mem::transmute(!0u32) }); } #[derive(Copy, Clone, PartialEq, Eq, Hash)] diff --git a/crates/hir-def/src/data.rs b/crates/hir-def/src/data.rs index 4083d497917ce..9986870d9dce6 100644 --- a/crates/hir-def/src/data.rs +++ b/crates/hir-def/src/data.rs @@ -706,7 +706,7 @@ impl<'a> AssocItemCollector<'a> { } AssocItem::MacroCall(call) => { let file_id = self.expander.current_file_id(); - let MacroCall { ast_id, expand_to, ref path } = item_tree[call]; + let MacroCall { ast_id, expand_to, call_site, ref path } = item_tree[call]; let module = self.expander.module.local_id; let resolver = |path| { @@ -725,6 +725,7 @@ impl<'a> AssocItemCollector<'a> { match macro_call_as_call_id( self.db.upcast(), &AstIdWithPath::new(file_id, ast_id, Clone::clone(path)), + call_site, expand_to, self.expander.module.krate(), resolver, diff --git a/crates/hir-def/src/expander.rs b/crates/hir-def/src/expander.rs index b7cffb57625ee..793c8ddeb504d 100644 --- a/crates/hir-def/src/expander.rs +++ b/crates/hir-def/src/expander.rs @@ -7,11 +7,12 @@ use base_db::{ use cfg::CfgOptions; use drop_bomb::DropBomb; use hir_expand::{ - attrs::RawAttrs, hygiene::Hygiene, mod_path::ModPath, ExpandError, ExpandResult, HirFileId, - InFile, MacroCallId, UnresolvedMacro, + attrs::RawAttrs, mod_path::ModPath, ExpandError, ExpandResult, HirFileId, InFile, MacroCallId, + SpanMap, UnresolvedMacro, }; use limit::Limit; use syntax::{ast, Parse, SyntaxNode}; +use triomphe::Arc; use crate::{ attr::Attrs, db::DefDatabase, lower::LowerCtx, macro_id_to_def_id, path::Path, AsMacroCall, @@ -21,7 +22,7 @@ use crate::{ #[derive(Debug)] pub struct Expander { cfg_options: CfgOptions, - hygiene: Hygiene, + hygiene: Arc, krate: CrateId, pub(crate) current_file_id: 
HirFileId, pub(crate) module: ModuleId, @@ -44,7 +45,7 @@ impl Expander { recursion_depth: 0, recursion_limit, cfg_options: db.crate_graph()[module.krate].cfg_options.clone(), - hygiene: Hygiene::new(db.upcast(), current_file_id), + hygiene: db.span_map(current_file_id), krate: module.krate, } } @@ -98,7 +99,7 @@ impl Expander { } pub fn exit(&mut self, db: &dyn DefDatabase, mut mark: Mark) { - self.hygiene = Hygiene::new(db.upcast(), mark.file_id); + self.hygiene = db.span_map(mark.file_id); self.current_file_id = mark.file_id; if self.recursion_depth == u32::MAX { // Recursion limit has been reached somewhere in the macro expansion tree. Reset the @@ -113,7 +114,7 @@ impl Expander { } pub fn ctx<'a>(&self, db: &'a dyn DefDatabase) -> LowerCtx<'a> { - LowerCtx::new(db, &self.hygiene, self.current_file_id) + LowerCtx::new(db, self.hygiene.clone(), self.current_file_id) } pub(crate) fn to_source(&self, value: T) -> InFile { @@ -143,7 +144,7 @@ impl Expander { } pub(crate) fn parse_path(&mut self, db: &dyn DefDatabase, path: ast::Path) -> Option { - let ctx = LowerCtx::new(db, &self.hygiene, self.current_file_id); + let ctx = LowerCtx::new(db, self.hygiene.clone(), self.current_file_id); Path::from_src(path, &ctx) } @@ -187,7 +188,7 @@ impl Expander { let parse = value.cast::()?; self.recursion_depth += 1; - self.hygiene = Hygiene::new(db.upcast(), file_id); + self.hygiene = db.span_map(file_id); let old_file_id = std::mem::replace(&mut self.current_file_id, file_id); let mark = Mark { file_id: old_file_id, diff --git a/crates/hir-def/src/find_path.rs b/crates/hir-def/src/find_path.rs index 1ebd1ba0e66db..5051884714ba4 100644 --- a/crates/hir-def/src/find_path.rs +++ b/crates/hir-def/src/find_path.rs @@ -586,7 +586,7 @@ fn find_local_import_locations( #[cfg(test)] mod tests { use base_db::fixture::WithFixture; - use hir_expand::hygiene::Hygiene; + use hir_expand::SpanMap; use syntax::ast::AstNode; use crate::test_db::TestDB; @@ -608,7 +608,7 @@ mod tests { let parsed_path_file = syntax::SourceFile::parse(&format!("use {path};")); let ast_path = parsed_path_file.syntax_node().descendants().find_map(syntax::ast::Path::cast).unwrap(); - let mod_path = ModPath::from_src(&db, ast_path, &Hygiene::new_unhygienic()).unwrap(); + let mod_path = ModPath::from_src(&db, ast_path, &SpanMap::default()).unwrap(); let def_map = module.def_map(&db); let resolved = def_map diff --git a/crates/hir-def/src/item_tree.rs b/crates/hir-def/src/item_tree.rs index 3bea91ee61c3c..901e14a2117ba 100644 --- a/crates/hir-def/src/item_tree.rs +++ b/crates/hir-def/src/item_tree.rs @@ -44,14 +44,13 @@ use std::{ use ast::{AstNode, HasName, StructKind}; use base_db::{ - span::{SpanAnchor, ROOT_ERASED_FILE_AST_ID}, + span::{SpanAnchor, SyntaxContextId, ROOT_ERASED_FILE_AST_ID}, CrateId, }; use either::Either; use hir_expand::{ ast_id_map::{AstIdNode, FileAstId}, attrs::RawAttrs, - hygiene::Hygiene, name::{name, AsName, Name}, ExpandTo, HirFileId, InFile, }; @@ -122,7 +121,7 @@ impl ItemTree { let mut item_tree = match_ast! 
{ match syntax { ast::SourceFile(file) => { - top_attrs = Some(RawAttrs::new(db.upcast(), SpanAnchor { file_id, ast_id: ROOT_ERASED_FILE_AST_ID }, &file, ctx.hygiene())); + top_attrs = Some(RawAttrs::new(db.upcast(), SpanAnchor { file_id, ast_id: ROOT_ERASED_FILE_AST_ID }, &file, ctx.span_map())); ctx.lower_module_items(&file) }, ast::MacroItems(items) => { @@ -750,6 +749,7 @@ pub struct MacroCall { pub path: Interned, pub ast_id: FileAstId, pub expand_to: ExpandTo, + pub call_site: SyntaxContextId, } #[derive(Debug, Clone, Eq, PartialEq)] @@ -779,7 +779,7 @@ impl Use { // Note: The AST unwraps are fine, since if they fail we should have never obtained `index`. let ast = InFile::new(file_id, self.ast_id).to_node(db.upcast()); let ast_use_tree = ast.use_tree().expect("missing `use_tree`"); - let hygiene = Hygiene::new(db.upcast(), file_id); + let hygiene = db.span_map(file_id); let (_, source_map) = lower::lower_use_tree(db, &hygiene, ast_use_tree).expect("failed to lower use tree"); source_map[index].clone() @@ -794,7 +794,7 @@ impl Use { // Note: The AST unwraps are fine, since if they fail we should have never obtained `index`. let ast = InFile::new(file_id, self.ast_id).to_node(db.upcast()); let ast_use_tree = ast.use_tree().expect("missing `use_tree`"); - let hygiene = Hygiene::new(db.upcast(), file_id); + let hygiene = db.span_map(file_id); lower::lower_use_tree(db, &hygiene, ast_use_tree).expect("failed to lower use tree").1 } } diff --git a/crates/hir-def/src/item_tree/lower.rs b/crates/hir-def/src/item_tree/lower.rs index 807b2a7bf752d..0b3def6d756b6 100644 --- a/crates/hir-def/src/item_tree/lower.rs +++ b/crates/hir-def/src/item_tree/lower.rs @@ -3,7 +3,7 @@ use std::collections::hash_map::Entry; use base_db::span::ErasedFileAstId; -use hir_expand::{ast_id_map::AstIdMap, hygiene::Hygiene, HirFileId}; +use hir_expand::{ast_id_map::AstIdMap, HirFileId, SpanMap}; use syntax::ast::{self, HasModuleItem, HasTypeBounds}; use crate::{ @@ -37,8 +37,8 @@ impl<'a> Ctx<'a> { } } - pub(super) fn hygiene(&self) -> &Hygiene { - self.body_ctx.hygiene() + pub(super) fn span_map(&self) -> &SpanMap { + self.body_ctx.span_map() } pub(super) fn lower_module_items(mut self, item_owner: &dyn HasModuleItem) -> ItemTree { @@ -90,7 +90,7 @@ impl<'a> Ctx<'a> { ast_id: self.source_ast_id_map.ast_id(block).erase(), }, block, - self.hygiene(), + self.span_map(), ), ); self.tree.top_level = block @@ -145,7 +145,7 @@ impl<'a> Ctx<'a> { self.db.upcast(), SpanAnchor { file_id: self.file, ast_id: mod_item.ast_id(&self.tree).erase() }, item, - self.hygiene(), + self.span_map(), ); self.add_attrs(mod_item.into(), attrs); @@ -174,7 +174,7 @@ impl<'a> Ctx<'a> { self.db.upcast(), SpanAnchor { file_id: self.file, ast_id: item.ast_id(&self.tree).erase() }, item_node, - self.hygiene(), + self.span_map(), ); self.add_attrs( match item { @@ -227,7 +227,7 @@ impl<'a> Ctx<'a> { self.db.upcast(), SpanAnchor { file_id: self.file, ast_id }, &field, - self.hygiene(), + self.span_map(), ), ); } @@ -260,7 +260,7 @@ impl<'a> Ctx<'a> { self.db.upcast(), SpanAnchor { file_id: self.file, ast_id }, &field, - self.hygiene(), + self.span_map(), ), ); } @@ -314,7 +314,7 @@ impl<'a> Ctx<'a> { self.db.upcast(), SpanAnchor { file_id: self.file, ast_id }, &variant, - self.hygiene(), + self.span_map(), ), ); } @@ -370,7 +370,7 @@ impl<'a> Ctx<'a> { self.db.upcast(), SpanAnchor { file_id: self.file, ast_id: ast_id.erase() }, &self_param, - self.hygiene(), + self.span_map(), ), ); has_self_param = true; @@ -396,7 +396,7 @@ impl<'a> Ctx<'a> { 
self.db.upcast(), SpanAnchor { file_id: self.file, ast_id: ast_id.erase() }, ¶m, - self.hygiene(), + self.span_map(), ), ); } @@ -585,7 +585,7 @@ impl<'a> Ctx<'a> { fn lower_use(&mut self, use_item: &ast::Use) -> Option> { let visibility = self.lower_visibility(use_item); let ast_id = self.source_ast_id_map.ast_id(use_item); - let (use_tree, _) = lower_use_tree(self.db, self.hygiene(), use_item.use_tree()?)?; + let (use_tree, _) = lower_use_tree(self.db, self.span_map(), use_item.use_tree()?)?; let res = Use { visibility, ast_id, use_tree }; Some(id(self.data().uses.alloc(res))) @@ -607,10 +607,18 @@ impl<'a> Ctx<'a> { } fn lower_macro_call(&mut self, m: &ast::MacroCall) -> Option> { - let path = Interned::new(ModPath::from_src(self.db.upcast(), m.path()?, self.hygiene())?); + let span_map = self.span_map(); + let path = Interned::new(ModPath::from_src(self.db.upcast(), m.path()?, span_map)?); let ast_id = self.source_ast_id_map.ast_id(m); let expand_to = hir_expand::ExpandTo::from_call_site(m); - let res = MacroCall { path, ast_id, expand_to }; + let res = MacroCall { + path, + ast_id, + expand_to, + call_site: span_map + .span_for_range(m.syntax().text_range()) + .map_or(SyntaxContextId::ROOT, |s| s.ctx), + }; Some(id(self.data().macro_calls.alloc(res))) } @@ -655,7 +663,7 @@ impl<'a> Ctx<'a> { ast_id: mod_item.ast_id(&self.tree).erase(), }, &item, - self.hygiene(), + self.span_map(), ); self.add_attrs(mod_item.into(), attrs); Some(mod_item) @@ -697,7 +705,7 @@ impl<'a> Ctx<'a> { self.db.upcast(), SpanAnchor { file_id: self.file, ast_id: owner_ast_id }, ¶m, - self.body_ctx.hygiene(), + self.body_ctx.span_map(), ); // This is identical to the body of `Ctx::add_attrs()` but we can't call that here // because it requires `&mut self` and the call to `generics.fill()` below also @@ -731,7 +739,7 @@ impl<'a> Ctx<'a> { } fn lower_visibility(&mut self, item: &dyn ast::HasVisibility) -> RawVisibilityId { - let vis = RawVisibility::from_ast_with_hygiene(self.db, item.visibility(), self.hygiene()); + let vis = RawVisibility::from_ast_with_hygiene(self.db, item.visibility(), self.span_map()); self.data().vis.alloc(vis) } @@ -809,7 +817,7 @@ fn lower_abi(abi: ast::Abi) -> Interned { struct UseTreeLowering<'a> { db: &'a dyn DefDatabase, - hygiene: &'a Hygiene, + hygiene: &'a SpanMap, mapping: Arena, } @@ -877,7 +885,7 @@ impl UseTreeLowering<'_> { pub(crate) fn lower_use_tree( db: &dyn DefDatabase, - hygiene: &Hygiene, + hygiene: &SpanMap, tree: ast::UseTree, ) -> Option<(UseTree, Arena)> { let mut lowering = UseTreeLowering { db, hygiene, mapping: Arena::new() }; diff --git a/crates/hir-def/src/item_tree/pretty.rs b/crates/hir-def/src/item_tree/pretty.rs index ca3785bf28dfa..244111d202ceb 100644 --- a/crates/hir-def/src/item_tree/pretty.rs +++ b/crates/hir-def/src/item_tree/pretty.rs @@ -457,7 +457,7 @@ impl Printer<'_> { } } ModItem::MacroCall(it) => { - let MacroCall { path, ast_id: _, expand_to: _ } = &self.tree[it]; + let MacroCall { path, ast_id: _, expand_to: _, call_site: _ } = &self.tree[it]; wln!(self, "{}!(...);", path.display(self.db.upcast())); } ModItem::MacroRules(it) => { diff --git a/crates/hir-def/src/lib.rs b/crates/hir-def/src/lib.rs index 5f09d7c481345..0da605819f9e6 100644 --- a/crates/hir-def/src/lib.rs +++ b/crates/hir-def/src/lib.rs @@ -63,7 +63,7 @@ use std::{ panic::{RefUnwindSafe, UnwindSafe}, }; -use base_db::{impl_intern_key, salsa, CrateId, ProcMacroKind}; +use base_db::{impl_intern_key, salsa, span::SyntaxContextId, CrateId, ProcMacroKind}; use hir_expand::{ 
ast_id_map::{AstIdNode, FileAstId}, attrs::{Attr, AttrId, AttrInput}, @@ -72,7 +72,6 @@ use hir_expand::{ builtin_fn_macro::{BuiltinFnLikeExpander, EagerExpander}, db::ExpandDatabase, eager::expand_eager_macro_input, - hygiene::Hygiene, name::Name, proc_macro::ProcMacroExpander, AstId, ExpandError, ExpandResult, ExpandTo, HirFileId, InFile, MacroCallId, MacroCallKind, @@ -82,7 +81,7 @@ use item_tree::ExternBlock; use la_arena::Idx; use nameres::DefMap; use stdx::impl_from; -use syntax::ast; +use syntax::{ast, AstNode}; pub use hir_expand::tt; @@ -1166,16 +1165,21 @@ impl AsMacroCall for InFile<&ast::MacroCall> { ) -> Result>, UnresolvedMacro> { let expands_to = hir_expand::ExpandTo::from_call_site(self.value); let ast_id = AstId::new(self.file_id, db.ast_id_map(self.file_id).ast_id(self.value)); - let h = Hygiene::new(db, self.file_id); - let path = self.value.path().and_then(|path| path::ModPath::from_src(db, path, &h)); + let span_map = db.span_map(self.file_id); + let path = self.value.path().and_then(|path| path::ModPath::from_src(db, path, &span_map)); let Some(path) = path else { return Ok(ExpandResult::only_err(ExpandError::other("malformed macro invocation"))); }; + let call_site = span_map + .span_for_range(self.value.syntax().text_range()) + .map_or(SyntaxContextId::ROOT, |s| s.ctx); + macro_call_as_call_id_with_eager( db, &AstIdWithPath::new(ast_id.file_id, ast_id.value, path), + call_site, expands_to, krate, resolver, @@ -1200,17 +1204,19 @@ impl AstIdWithPath { fn macro_call_as_call_id( db: &dyn ExpandDatabase, call: &AstIdWithPath, + call_site: SyntaxContextId, expand_to: ExpandTo, krate: CrateId, resolver: impl Fn(path::ModPath) -> Option + Copy, ) -> Result, UnresolvedMacro> { - macro_call_as_call_id_with_eager(db, call, expand_to, krate, resolver, resolver) + macro_call_as_call_id_with_eager(db, call, call_site, expand_to, krate, resolver, resolver) .map(|res| res.value) } fn macro_call_as_call_id_with_eager( db: &dyn ExpandDatabase, call: &AstIdWithPath, + call_site: SyntaxContextId, expand_to: ExpandTo, krate: CrateId, resolver: impl FnOnce(path::ModPath) -> Option, @@ -1231,6 +1237,7 @@ fn macro_call_as_call_id_with_eager( db, krate, MacroCallKind::FnLike { ast_id: call.ast_id, expand_to }, + call_site, )), err: None, }, @@ -1329,6 +1336,8 @@ fn derive_macro_as_call_id( derive_index: derive_pos, derive_attr_index, }, + //FIXME + SyntaxContextId::ROOT, ); Ok((macro_id, def_id, call_id)) } @@ -1358,6 +1367,8 @@ fn attr_macro_as_call_id( attr_args: Arc::new(arg), invoc_attr_index: macro_attr.id, }, + //FIXME + SyntaxContextId::ROOT, ) } intern::impl_internable!( diff --git a/crates/hir-def/src/lower.rs b/crates/hir-def/src/lower.rs index 52781d9889212..28a652a60a768 100644 --- a/crates/hir-def/src/lower.rs +++ b/crates/hir-def/src/lower.rs @@ -3,8 +3,7 @@ use std::cell::OnceCell; use hir_expand::{ ast_id_map::{AstIdMap, AstIdNode}, - hygiene::Hygiene, - AstId, HirFileId, InFile, + AstId, HirFileId, InFile, SpanMap, }; use syntax::ast; use triomphe::Arc; @@ -13,28 +12,25 @@ use crate::{db::DefDatabase, path::Path}; pub struct LowerCtx<'a> { pub db: &'a dyn DefDatabase, - hygiene: Hygiene, + hygiene: Arc, + // FIXME: This optimization is probably pointless, ast id map should pretty much always exist anyways. 
ast_id_map: Option<(HirFileId, OnceCell>)>, } impl<'a> LowerCtx<'a> { - pub fn new(db: &'a dyn DefDatabase, hygiene: &Hygiene, file_id: HirFileId) -> Self { - LowerCtx { db, hygiene: hygiene.clone(), ast_id_map: Some((file_id, OnceCell::new())) } + pub fn new(db: &'a dyn DefDatabase, hygiene: Arc, file_id: HirFileId) -> Self { + LowerCtx { db, hygiene, ast_id_map: Some((file_id, OnceCell::new())) } } pub fn with_file_id(db: &'a dyn DefDatabase, file_id: HirFileId) -> Self { - LowerCtx { - db, - hygiene: Hygiene::new(db.upcast(), file_id), - ast_id_map: Some((file_id, OnceCell::new())), - } + LowerCtx { db, hygiene: db.span_map(file_id), ast_id_map: Some((file_id, OnceCell::new())) } } - pub fn with_hygiene(db: &'a dyn DefDatabase, hygiene: &Hygiene) -> Self { - LowerCtx { db, hygiene: hygiene.clone(), ast_id_map: None } + pub fn with_hygiene(db: &'a dyn DefDatabase, hygiene: Arc) -> Self { + LowerCtx { db, hygiene, ast_id_map: None } } - pub(crate) fn hygiene(&self) -> &Hygiene { + pub(crate) fn span_map(&self) -> &SpanMap { &self.hygiene } diff --git a/crates/hir-def/src/macro_expansion_tests/mbe.rs b/crates/hir-def/src/macro_expansion_tests/mbe.rs index b5d70052a89e8..af4b3e12b9f3e 100644 --- a/crates/hir-def/src/macro_expansion_tests/mbe.rs +++ b/crates/hir-def/src/macro_expansion_tests/mbe.rs @@ -26,6 +26,7 @@ macro_rules! f { // +tokenids f!(struct MyTraitMap2); "#, + // FIXME: #SpanAnchor(FileId(0), 1)@91..92 why is there whitespace annotated with a span here? expect![[r#" macro_rules! f { ( struct $ident:ident ) => { @@ -36,7 +37,7 @@ macro_rules! f { } struct#SpanAnchor(FileId(0), 1)@58..64 MyTraitMap2#SpanAnchor(FileId(0), 2)@23..34 {#SpanAnchor(FileId(0), 1)@72..73 - map#SpanAnchor(FileId(0), 1)@86..89:#SpanAnchor(FileId(0), 1)@89..90 ::std#SpanAnchor(FileId(0), 1)@93..96::collections#SpanAnchor(FileId(0), 1)@98..109::HashSet#SpanAnchor(FileId(0), 1)@111..118<#SpanAnchor(FileId(0), 1)@118..119(#SpanAnchor(FileId(0), 1)@119..120)#SpanAnchor(FileId(0), 1)@120..121>#SpanAnchor(FileId(0), 1)@121..122,#SpanAnchor(FileId(0), 1)@122..123 + map#SpanAnchor(FileId(0), 1)@86..89:#SpanAnchor(FileId(0), 1)@89..90 #SpanAnchor(FileId(0), 1)@91..92::#SpanAnchor(FileId(0), 1)@92..93std#SpanAnchor(FileId(0), 1)@93..96::#SpanAnchor(FileId(0), 1)@97..98collections#SpanAnchor(FileId(0), 1)@98..109::#SpanAnchor(FileId(0), 1)@110..111HashSet#SpanAnchor(FileId(0), 1)@111..118<#SpanAnchor(FileId(0), 1)@118..119(#SpanAnchor(FileId(0), 1)@119..120)#SpanAnchor(FileId(0), 1)@120..121>#SpanAnchor(FileId(0), 1)@121..122,#SpanAnchor(FileId(0), 1)@122..123 }#SpanAnchor(FileId(0), 1)@132..133 "#]], ); @@ -938,9 +939,9 @@ macro_rules! vec { fn f() { { let mut v = Vec::new(); - v.push(1); - v.push(2); - v.push(3); + v.push((1)); + v.push((2)); + v.push((3)); v }; } @@ -1409,8 +1410,8 @@ macro_rules! 
matches { }; } fn main() { - match 0 { - 0|1 if true =>true , _=>false + match (0) { + 0|1 if (true )=>true , _=>false }; } "#]], diff --git a/crates/hir-def/src/macro_expansion_tests/mbe/metavar_expr.rs b/crates/hir-def/src/macro_expansion_tests/mbe/metavar_expr.rs index 967b5ad36babf..dd83e5c04d45b 100644 --- a/crates/hir-def/src/macro_expansion_tests/mbe/metavar_expr.rs +++ b/crates/hir-def/src/macro_expansion_tests/mbe/metavar_expr.rs @@ -62,10 +62,10 @@ macro_rules !implement_methods { struct Foo; impl Foo { fn alpha() -> &'static[u32] { - &[1, 2, 3] + &[(1), (2), (3)] } fn beta() -> &'static[u32] { - &[1, 2, 3] + &[(1), (2), (3)] } } "#]], diff --git a/crates/hir-def/src/macro_expansion_tests/mbe/regression.rs b/crates/hir-def/src/macro_expansion_tests/mbe/regression.rs index 2886b2a366c04..71dbb400b5487 100644 --- a/crates/hir-def/src/macro_expansion_tests/mbe/regression.rs +++ b/crates/hir-def/src/macro_expansion_tests/mbe/regression.rs @@ -39,8 +39,8 @@ fn main() { }; { let mut v = Vec::new(); - v.push(1u32); - v.push(2); + v.push((1u32)); + v.push((2)); v }; } diff --git a/crates/hir-def/src/macro_expansion_tests/mbe/tt_conversion.rs b/crates/hir-def/src/macro_expansion_tests/mbe/tt_conversion.rs index ae56934f632f1..b2ac7eb4094dd 100644 --- a/crates/hir-def/src/macro_expansion_tests/mbe/tt_conversion.rs +++ b/crates/hir-def/src/macro_expansion_tests/mbe/tt_conversion.rs @@ -192,9 +192,9 @@ macro_rules! constant { ($e:expr ;) => {$e}; } -const _: () = 0.0; -const _: () = 0.; -const _: () = 0e0; +const _: () = (0.0); +const _: () = (0.); +const _: () = (0e0); "#]], ); } diff --git a/crates/hir-def/src/macro_expansion_tests/mod.rs b/crates/hir-def/src/macro_expansion_tests/mod.rs index 1a672b4605c66..d4902c52e748d 100644 --- a/crates/hir-def/src/macro_expansion_tests/mod.rs +++ b/crates/hir-def/src/macro_expansion_tests/mod.rs @@ -123,8 +123,9 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream } else { assert!( parse.errors().is_empty(), - "parse errors in expansion: \n{:#?}", - parse.errors() + "parse errors in expansion: \n{:#?}\n```\n{}\n```", + parse.errors(), + parse.syntax_node(), ); } let pp = pretty_print_macro_expansion( diff --git a/crates/hir-def/src/nameres/collector.rs b/crates/hir-def/src/nameres/collector.rs index 659e7ed5033e4..360bf0f93e30b 100644 --- a/crates/hir-def/src/nameres/collector.rs +++ b/crates/hir-def/src/nameres/collector.rs @@ -6,7 +6,7 @@ use std::{cmp::Ordering, iter, mem}; use ::tt::Span; -use base_db::{CrateId, Dependency, Edition, FileId}; +use base_db::{span::SyntaxContextId, CrateId, Dependency, Edition, FileId}; use cfg::{CfgExpr, CfgOptions}; use either::Either; use hir_expand::{ @@ -15,7 +15,6 @@ use hir_expand::{ builtin_attr_macro::find_builtin_attr, builtin_derive_macro::find_builtin_derive, builtin_fn_macro::find_builtin_macro, - hygiene::Hygiene, name::{name, AsName, Name}, proc_macro::ProcMacroExpander, ExpandResult, ExpandTo, HirFileId, InFile, MacroCallId, MacroCallKind, MacroCallLoc, @@ -112,7 +111,6 @@ pub(super) fn collect_defs(db: &dyn DefDatabase, def_map: DefMap, tree_id: TreeI from_glob_import: Default::default(), skip_attrs: Default::default(), is_proc_macro, - hygienes: FxHashMap::default(), }; if tree_id.is_block() { collector.seed_with_inner(tree_id); @@ -212,9 +210,22 @@ struct MacroDirective { #[derive(Clone, Debug, Eq, PartialEq)] enum MacroDirectiveKind { - FnLike { ast_id: AstIdWithPath, expand_to: ExpandTo }, - Derive { ast_id: AstIdWithPath, derive_attr: AttrId, derive_pos: usize }, 
- Attr { ast_id: AstIdWithPath, attr: Attr, mod_item: ModItem, tree: TreeId }, + FnLike { + ast_id: AstIdWithPath, + expand_to: ExpandTo, + call_site: SyntaxContextId, + }, + Derive { + ast_id: AstIdWithPath, + derive_attr: AttrId, + derive_pos: usize, + }, + Attr { + ast_id: AstIdWithPath, + attr: Attr, + mod_item: ModItem, + /* is this needed? */ tree: TreeId, + }, } /// Walks the tree of module recursively @@ -242,12 +253,6 @@ struct DefCollector<'a> { /// This also stores the attributes to skip when we resolve derive helpers and non-macro /// non-builtin attributes in general. skip_attrs: FxHashMap, AttrId>, - /// `Hygiene` cache, because `Hygiene` construction is expensive. - /// - /// Almost all paths should have been lowered to `ModPath` during `ItemTree` construction. - /// However, `DefCollector` still needs to lower paths in attributes, in particular those in - /// derive meta item list. - hygienes: FxHashMap, } impl DefCollector<'_> { @@ -315,9 +320,8 @@ impl DefCollector<'_> { } if *attr_name == hir_expand::name![feature] { - let hygiene = &Hygiene::new_unhygienic(); let features = attr - .parse_path_comma_token_tree(self.db.upcast(), hygiene) + .parse_path_comma_token_tree(self.db.upcast()) .into_iter() .flatten() .filter_map(|feat| match feat.segments() { @@ -1119,10 +1123,11 @@ impl DefCollector<'_> { let resolver_def_id = |path| resolver(path).map(|(_, it)| it); match &directive.kind { - MacroDirectiveKind::FnLike { ast_id, expand_to } => { + MacroDirectiveKind::FnLike { ast_id, expand_to, call_site } => { let call_id = macro_call_as_call_id( self.db.upcast(), ast_id, + *call_site, *expand_to, self.def_map.krate, resolver_def_id, @@ -1234,19 +1239,7 @@ impl DefCollector<'_> { }; let ast_id = ast_id.with_value(ast_adt_id); - let extend_unhygenic; - let hygiene = if file_id.is_macro() { - self.hygienes - .entry(file_id) - .or_insert_with(|| Hygiene::new(self.db.upcast(), file_id)) - } else { - // Avoid heap allocation (`Hygiene` embraces `Arc`) and hash map entry - // when we're in an oridinary (non-macro) file. - extend_unhygenic = Hygiene::new_unhygienic(); - &extend_unhygenic - }; - - match attr.parse_path_comma_token_tree(self.db.upcast(), hygiene) { + match attr.parse_path_comma_token_tree(self.db.upcast()) { Some(derive_macros) => { let mut len = 0; for (idx, path) in derive_macros.enumerate() { @@ -1414,11 +1407,12 @@ impl DefCollector<'_> { for directive in &self.unresolved_macros { match &directive.kind { - MacroDirectiveKind::FnLike { ast_id, expand_to } => { + MacroDirectiveKind::FnLike { ast_id, expand_to, call_site } => { // FIXME: we shouldn't need to re-resolve the macro here just to get the unresolved error! let macro_call_as_call_id = macro_call_as_call_id( self.db.upcast(), ast_id, + *call_site, *expand_to, self.def_map.krate, |path| { @@ -1823,9 +1817,8 @@ impl ModCollector<'_, '_> { cov_mark::hit!(macro_rules_from_other_crates_are_visible_with_macro_use); let mut single_imports = Vec::new(); - let hygiene = Hygiene::new_unhygienic(); for attr in macro_use_attrs { - let Some(paths) = attr.parse_path_comma_token_tree(db.upcast(), &hygiene) else { + let Some(paths) = attr.parse_path_comma_token_tree(db.upcast()) else { // `#[macro_use]` (without any paths) found, forget collected names and just import // all visible macros. 
self.def_collector.import_macros_from_extern_crate( @@ -2209,8 +2202,12 @@ impl ModCollector<'_, '_> { } } - fn collect_macro_call(&mut self, mac: &MacroCall, container: ItemContainerId) { - let ast_id = AstIdWithPath::new(self.file_id(), mac.ast_id, ModPath::clone(&mac.path)); + fn collect_macro_call( + &mut self, + &MacroCall { ref path, ast_id, expand_to, call_site }: &MacroCall, + container: ItemContainerId, + ) { + let ast_id = AstIdWithPath::new(self.file_id(), ast_id, ModPath::clone(&path)); let db = self.def_collector.db; // FIXME: Immediately expanding in "Case 1" is insufficient since "Case 2" may also define @@ -2221,7 +2218,8 @@ impl ModCollector<'_, '_> { if let Ok(res) = macro_call_as_call_id_with_eager( db.upcast(), &ast_id, - mac.expand_to, + call_site, + expand_to, self.def_collector.def_map.krate, |path| { path.as_ident().and_then(|name| { @@ -2275,7 +2273,7 @@ impl ModCollector<'_, '_> { self.def_collector.unresolved_macros.push(MacroDirective { module_id: self.module_id, depth: self.macro_depth + 1, - kind: MacroDirectiveKind::FnLike { ast_id, expand_to: mac.expand_to }, + kind: MacroDirectiveKind::FnLike { ast_id, expand_to: expand_to, call_site }, container, }); } @@ -2362,7 +2360,6 @@ mod tests { from_glob_import: Default::default(), skip_attrs: Default::default(), is_proc_macro: false, - hygienes: FxHashMap::default(), }; collector.seed_with_top_level(); collector.collect(); diff --git a/crates/hir-def/src/path/lower.rs b/crates/hir-def/src/path/lower.rs index abd817893cc4c..ee49dfa44c80c 100644 --- a/crates/hir-def/src/path/lower.rs +++ b/crates/hir-def/src/path/lower.rs @@ -4,8 +4,11 @@ use std::iter; use crate::{lower::LowerCtx, type_ref::ConstRef}; -use either::Either; -use hir_expand::name::{name, AsName}; +use base_db::span::SyntaxContextId; +use hir_expand::{ + mod_path::resolve_crate_root, + name::{name, AsName}, +}; use intern::Interned; use syntax::ast::{self, AstNode, HasTypeBounds}; @@ -14,14 +17,17 @@ use crate::{ type_ref::{LifetimeRef, TypeBound, TypeRef}, }; +// fn resolve_crate_root + /// Converts an `ast::Path` to `Path`. Works with use trees. /// It correctly handles `$crate` based path from macro call. 
+// FIXME: flip the params pub(super) fn lower_path(mut path: ast::Path, ctx: &LowerCtx<'_>) -> Option { let mut kind = PathKind::Plain; let mut type_anchor = None; let mut segments = Vec::new(); let mut generic_args = Vec::new(); - let hygiene = ctx.hygiene(); + let hygiene = ctx.span_map(); loop { let segment = path.segment()?; @@ -31,31 +37,34 @@ pub(super) fn lower_path(mut path: ast::Path, ctx: &LowerCtx<'_>) -> Option { - // FIXME: this should just return name - match hygiene.name_ref_to_name(ctx.db.upcast(), name_ref) { - Either::Left(name) => { - let args = segment - .generic_arg_list() - .and_then(|it| lower_generic_args(ctx, it)) - .or_else(|| { - lower_generic_args_from_fn_path( - ctx, - segment.param_list(), - segment.ret_type(), - ) - }) - .map(Interned::new); - if let Some(_) = args { - generic_args.resize(segments.len(), None); - generic_args.push(args); - } - segments.push(name); - } - Either::Right(crate_id) => { - kind = PathKind::DollarCrate(crate_id); - break; - } + let name = if name_ref.text() == "$crate" { + kind = resolve_crate_root( + ctx.db.upcast(), + hygiene + .span_for_range(name_ref.syntax().text_range()) + .map_or(SyntaxContextId::ROOT, |s| s.ctx), + ) + .map(PathKind::DollarCrate)?; + break; + } else { + name_ref.as_name() + }; + let args = segment + .generic_arg_list() + .and_then(|it| lower_generic_args(ctx, it)) + .or_else(|| { + lower_generic_args_from_fn_path( + ctx, + segment.param_list(), + segment.ret_type(), + ) + }) + .map(Interned::new); + if let Some(_) = args { + generic_args.resize(segments.len(), None); + generic_args.push(args); } + segments.push(name); } ast::PathSegmentKind::SelfTypeKw => { segments.push(name![Self]); @@ -151,8 +160,16 @@ pub(super) fn lower_path(mut path: ast::Path, ctx: &LowerCtx<'_>) -> Option Self { let mut this = Self { storage: Default::default(), events: Default::default() }; + this.intern_syntax_context(SyntaxContextData::root()); this.set_expand_proc_attr_macros_with_durability(true, Durability::HIGH); this } diff --git a/crates/hir-def/src/visibility.rs b/crates/hir-def/src/visibility.rs index 30f48de61f2e8..b341b8cfbd961 100644 --- a/crates/hir-def/src/visibility.rs +++ b/crates/hir-def/src/visibility.rs @@ -2,7 +2,7 @@ use std::iter; -use hir_expand::{hygiene::Hygiene, InFile}; +use hir_expand::{InFile, SpanMap}; use la_arena::ArenaMap; use syntax::ast; use triomphe::Arc; @@ -34,13 +34,13 @@ impl RawVisibility { db: &dyn DefDatabase, node: InFile>, ) -> RawVisibility { - Self::from_ast_with_hygiene(db, node.value, &Hygiene::new(db.upcast(), node.file_id)) + Self::from_ast_with_hygiene(db, node.value, &db.span_map(node.file_id)) } pub(crate) fn from_ast_with_hygiene( db: &dyn DefDatabase, node: Option, - hygiene: &Hygiene, + hygiene: &SpanMap, ) -> RawVisibility { Self::from_ast_with_hygiene_and_default(db, node, RawVisibility::private(), hygiene) } @@ -49,7 +49,7 @@ impl RawVisibility { db: &dyn DefDatabase, node: Option, default: RawVisibility, - hygiene: &Hygiene, + hygiene: &SpanMap, ) -> RawVisibility { let node = match node { None => return default, diff --git a/crates/hir-expand/src/attrs.rs b/crates/hir-expand/src/attrs.rs index 01a66cd03ab1d..5ce12d2f6e717 100644 --- a/crates/hir-expand/src/attrs.rs +++ b/crates/hir-expand/src/attrs.rs @@ -13,10 +13,9 @@ use triomphe::Arc; use crate::{ db::ExpandDatabase, - hygiene::Hygiene, mod_path::ModPath, tt::{self, Subtree}, - InFile, + InFile, SpanMap, }; /// Syntactical attributes, without filtering of `cfg_attr`s. 
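With `Hygiene::name_ref_to_name` gone, the `lower_path` change above detects the `$crate` token itself: it looks up the token's `SyntaxContextId` in the span map and hands that context to `resolve_crate_root`. Below is a minimal standalone sketch of just that branch; every type and function in it is a simplified stand-in, not the real hir-def/hir-expand API.

```rust
// Simplified stand-ins for the base-db / hir-expand types.
#[derive(Clone, Copy, PartialEq, Eq)]
struct SyntaxContextId(u32);

impl SyntaxContextId {
    const ROOT: Self = SyntaxContextId(0);
}

#[derive(Clone, Copy, Debug, PartialEq, Eq)]
struct CrateId(u32);

#[derive(Debug, PartialEq, Eq)]
enum PathKind {
    Plain,
    DollarCrate(CrateId),
}

/// Stand-in for `span_map.span_for_range(...)`: a real span map is keyed by
/// text ranges; here we key by the token text for brevity.
fn span_ctx_for(token: &str) -> Option<SyntaxContextId> {
    (token == "$crate").then_some(SyntaxContextId(1))
}

/// Stand-in for `resolve_crate_root`: the real one walks the context's
/// expansion marks; here any non-root context "resolves".
fn resolve_crate_root(ctx: SyntaxContextId) -> Option<CrateId> {
    (ctx != SyntaxContextId::ROOT).then_some(CrateId(42))
}

fn lower_segment(token: &str) -> PathKind {
    if token == "$crate" {
        let ctx = span_ctx_for(token).unwrap_or(SyntaxContextId::ROOT);
        if let Some(krate) = resolve_crate_root(ctx) {
            return PathKind::DollarCrate(krate);
        }
    }
    // Ordinary segments fall through to plain name lowering.
    PathKind::Plain
}

fn main() {
    assert_eq!(lower_segment("$crate"), PathKind::DollarCrate(CrateId(42)));
    assert_eq!(lower_segment("std"), PathKind::Plain);
}
```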
@@ -44,7 +43,7 @@ impl RawAttrs {
         db: &dyn ExpandDatabase,
         span_anchor: SpanAnchor,
         owner: &dyn ast::HasAttrs,
-        hygiene: &Hygiene,
+        hygiene: &SpanMap,
     ) -> Self {
         let entries = collect_attrs(owner)
             .filter_map(|(id, attr)| match attr {
@@ -69,8 +68,7 @@ impl RawAttrs {
         span_anchor: SpanAnchor,
         owner: InFile<&dyn ast::HasAttrs>,
     ) -> Self {
-        let hygiene = Hygiene::new(db, owner.file_id);
-        Self::new(db, span_anchor, owner.value, &hygiene)
+        Self::new(db, span_anchor, owner.value, &db.span_map(owner.file_id))
     }

     pub fn merge(&self, other: Self) -> Self {
@@ -135,9 +133,7 @@ impl RawAttrs {
                             delimiter: tt::Delimiter::unspecified(),
                             token_trees: attr.to_vec(),
                         };
-                        // FIXME hygiene
-                        let hygiene = Hygiene::new_unhygienic();
-                        Attr::from_tt(db, &tree, &hygiene, index.with_cfg_attr(idx))
+                        Attr::from_tt(db, &tree, index.with_cfg_attr(idx))
                     },
                 );

@@ -220,7 +216,7 @@ impl Attr {
         db: &dyn ExpandDatabase,
         span_anchor: SpanAnchor,
         ast: ast::Meta,
-        hygiene: &Hygiene,
+        hygiene: &SpanMap,
         id: AttrId,
     ) -> Option<Attr> {
         let path = Interned::new(ModPath::from_src(db, ast.path()?, hygiene)?);
@@ -234,9 +230,7 @@ impl Attr {
             // FIXME: We could also allocate ids for attributes and use the attribute itself as an anchor
             let offset =
                 db.ast_id_map(span_anchor.file_id).get_raw(span_anchor.ast_id).text_range().start();
-            // FIXME: Spanmap
-            let tree =
-                syntax_node_to_token_tree(tt.syntax(), span_anchor, offset, &Default::default());
+            let tree = syntax_node_to_token_tree(tt.syntax(), span_anchor, offset, hygiene);
             Some(Interned::new(AttrInput::TokenTree(Box::new(tree))))
         } else {
             None
@@ -244,18 +238,13 @@ impl Attr {
         Some(Attr { id, path, input })
     }

-    fn from_tt(
-        db: &dyn ExpandDatabase,
-        tt: &tt::Subtree,
-        hygiene: &Hygiene,
-        id: AttrId,
-    ) -> Option<Attr> {
+    fn from_tt(db: &dyn ExpandDatabase, tt: &tt::Subtree, id: AttrId) -> Option<Attr> {
         // FIXME: Unecessary roundtrip tt -> ast -> tt
         let (parse, _map) = mbe::token_tree_to_syntax_node(tt, mbe::TopEntryPoint::MetaItem);
         let ast = ast::Meta::cast(parse.syntax_node())?;

         // FIXME: we discard spans here!
-        Self::from_src(db, SpanAnchor::DUMMY, ast, hygiene, id)
+        Self::from_src(db, SpanAnchor::DUMMY, ast, &SpanMap::default(), id)
     }

     pub fn path(&self) -> &ModPath {
@@ -295,9 +284,8 @@ impl Attr {
     pub fn parse_path_comma_token_tree<'a>(
         &'a self,
         db: &'a dyn ExpandDatabase,
-        hygiene: &'a Hygiene,
     ) -> Option<impl Iterator<Item = ModPath> + 'a> {
         let args = self.token_tree_value()?;

         if args.delimiter.kind != DelimiterKind::Parenthesis {
             return None;
@@ -309,12 +298,13 @@ impl Attr {
                 if tts.is_empty() {
                     return None;
                 }
-                // FIXME: This is necessarily a hack. It'd be nice if we could avoid allocation here.
+                // FIXME: This is necessarily a hack. It'd be nice if we could avoid allocation
+                // here.
                 let subtree = tt::Subtree {
                     delimiter: tt::Delimiter::unspecified(),
-                    token_trees: tts.into_iter().cloned().collect(),
+                    token_trees: tts.to_vec(),
                 };
-                let (parse, _) =
+                let (parse, span_map) =
                     mbe::token_tree_to_syntax_node(&subtree, mbe::TopEntryPoint::MetaItem);
                 let meta = ast::Meta::cast(parse.syntax_node())?;
                 // Only simple paths are allowed.
@@ -323,7 +313,7 @@ impl Attr {
                     return None;
                 }
                 let path = meta.path()?;
-                ModPath::from_src(db, path, hygiene)
+                ModPath::from_src(db, path, &span_map)
             });

         Some(paths)
diff --git a/crates/hir-expand/src/db.rs b/crates/hir-expand/src/db.rs
index 1a68653a6fcd1..7dd69099a6e3f 100644
--- a/crates/hir-expand/src/db.rs
+++ b/crates/hir-expand/src/db.rs
@@ -1,6 +1,6 @@
 //! Defines database & queries for macro expansion.
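For reference, `parse_path_comma_token_tree` (reworked above) keeps its overall shape: check that the argument list is parenthesized, split the tokens on commas, reparse each chunk, and keep only simple paths. The following is a toy model of that logic over plain strings with hypothetical names; the real version operates on token trees and `ModPath`.

```rust
/// Toy version of the comma-path split: real token trees are replaced by a
/// plain string, and path parsing by a character check.
fn parse_path_comma_list(args: &str) -> Option<Vec<String>> {
    // The real code first checks `args.delimiter.kind == Parenthesis`.
    let inner = args.strip_prefix('(')?.strip_suffix(')')?;
    let is_simple_path = |s: &str| {
        !s.is_empty()
            && s.split("::")
                .all(|seg| !seg.is_empty() && seg.chars().all(|c| c.is_alphanumeric() || c == '_'))
    };
    Some(
        inner
            .split(',') // stands in for splitting the token list on `,` punct
            .map(str::trim)
            .filter(|s| is_simple_path(s)) // "Only simple paths are allowed."
            .map(str::to_owned)
            .collect(),
    )
}

fn main() {
    let paths = parse_path_comma_list("(serde::Serialize, Debug,)").unwrap();
    assert_eq!(paths, ["serde::Serialize", "Debug"]);
}
```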
-use ::tt::SyntaxContext; +use ::tt::{SpanAnchor as _, SyntaxContext}; use base_db::{ salsa, span::{SpanAnchor, SyntaxContextId, ROOT_ERASED_FILE_AST_ID}, @@ -17,9 +17,10 @@ use triomphe::Arc; use crate::{ ast_id_map::AstIdMap, + attrs::RawAttrs, builtin_attr_macro::pseudo_derive_attr_expansion, builtin_fn_macro::EagerExpander, - hygiene::{self, HygieneFrame, SyntaxContextData}, + hygiene::{self, SyntaxContextData, Transparency}, tt, AstId, BuiltinAttrExpander, BuiltinDeriveExpander, BuiltinFnLikeExpander, EagerCallInfo, ExpandError, ExpandResult, ExpandTo, HirFileId, HirFileIdRepr, MacroCallId, MacroCallKind, MacroCallLoc, MacroDefId, MacroDefKind, MacroFile, ProcMacroExpander, SpanMap, @@ -37,16 +38,35 @@ static TOKEN_LIMIT: Limit = Limit::new(1_048_576); /// Old-style `macro_rules` or the new macros 2.0 pub struct DeclarativeMacroExpander { pub mac: mbe::DeclarativeMacro, + pub transparency: Transparency, } impl DeclarativeMacroExpander { - pub fn expand(&self, tt: tt::Subtree) -> ExpandResult { + pub fn expand( + &self, + db: &dyn ExpandDatabase, + tt: tt::Subtree, + call_id: MacroCallId, + ) -> ExpandResult { + match self.mac.err() { + Some(e) => ExpandResult::new( + tt::Subtree::empty(), + ExpandError::other(format!("invalid macro definition: {e}")), + ), + None => self + .mac + .expand(&tt, |s| s.ctx = db.apply_mark(s.ctx, call_id, self.transparency)) + .map_err(Into::into), + } + } + + pub fn expand_unhygienic(&self, tt: tt::Subtree) -> ExpandResult { match self.mac.err() { Some(e) => ExpandResult::new( tt::Subtree::empty(), ExpandError::other(format!("invalid macro definition: {e}")), ), - None => self.mac.expand(&tt).map_err(Into::into), + None => self.mac.expand(&tt, |_| ()).map_err(Into::into), } } } @@ -83,6 +103,9 @@ pub trait ExpandDatabase: SourceDatabase { &self, macro_file: MacroFile, ) -> ExpandResult<(Parse, Arc)>; + // TODO: transparent? + #[salsa::transparent] + fn span_map(&self, file_id: HirFileId) -> Arc; /// Macro ids. That's probably the tricksiest bit in rust-analyzer, and the /// reason why we use salsa at all. @@ -97,8 +120,8 @@ pub trait ExpandDatabase: SourceDatabase { #[salsa::invoke(hygiene::apply_mark)] fn apply_mark( &self, - ctxt: SyntaxContextData, - file_id: HirFileId, + ctxt: SyntaxContextId, + call_id: MacroCallId, transparency: hygiene::Transparency, ) -> SyntaxContextId; @@ -137,8 +160,13 @@ pub trait ExpandDatabase: SourceDatabase { &self, macro_call: MacroCallId, ) -> ExpandResult>; +} - fn hygiene_frame(&self, file_id: HirFileId) -> Arc; +fn span_map(db: &dyn ExpandDatabase, file_id: HirFileId) -> Arc { + match file_id.repr() { + HirFileIdRepr::FileId(_) => Arc::new(Default::default()), + HirFileIdRepr::MacroFile(m) => db.parse_macro_expansion(m).value.1, + } } /// This expands the given macro call, but with different arguments. 
This is @@ -220,7 +248,9 @@ pub fn expand_speculative( ), ) } - MacroDefKind::Declarative(it) => db.decl_macro_expander(loc.krate, it).expand(tt), + MacroDefKind::Declarative(it) => { + db.decl_macro_expander(loc.krate, it).expand_unhygienic(tt) + } MacroDefKind::BuiltIn(it, _) => it.expand(db, actual_macro_call, &tt).map_err(Into::into), MacroDefKind::BuiltInEager(it, _) => { it.expand(db, actual_macro_call, &tt).map_err(Into::into) @@ -229,7 +259,9 @@ pub fn expand_speculative( }; let expand_to = macro_expand_to(db, actual_macro_call); - let (node, rev_tmap) = token_tree_to_syntax_node(db, &speculative_expansion.value, expand_to); + let (node, mut rev_tmap) = + token_tree_to_syntax_node(db, &speculative_expansion.value, expand_to); + rev_tmap.real_file = false; let syntax_node = node.syntax_node(); let token = rev_tmap @@ -285,7 +317,8 @@ fn parse_macro_expansion( tracing::debug!("expanded = {}", tt.as_debug_string()); tracing::debug!("kind = {:?}", expand_to); - let (parse, rev_token_map) = token_tree_to_syntax_node(db, &tt, expand_to); + let (parse, mut rev_token_map) = token_tree_to_syntax_node(db, &tt, expand_to); + rev_token_map.real_file = false; ExpandResult { value: (parse, Arc::new(rev_token_map)), err } } @@ -464,41 +497,70 @@ fn decl_macro_expander( (parse.syntax_node(), map) } }; - let mac = match id.to_ptr(db).to_node(&root) { - ast::Macro::MacroRules(macro_rules) => match macro_rules.token_tree() { - Some(arg) => { - let tt = mbe::syntax_node_to_token_tree( - arg.syntax(), - SpanAnchor { file_id: id.file_id, ast_id: id.value.erase() }, - macro_rules.syntax().text_range().start(), - &map, - ); - let mac = mbe::DeclarativeMacro::parse_macro_rules(&tt, is_2021); - mac - } - None => mbe::DeclarativeMacro::from_err( - mbe::ParseError::Expected("expected a token tree".into()), - is_2021, - ), - }, - ast::Macro::MacroDef(macro_def) => match macro_def.body() { - Some(arg) => { - let tt = mbe::syntax_node_to_token_tree( - arg.syntax(), - SpanAnchor { file_id: id.file_id, ast_id: id.value.erase() }, - macro_def.syntax().text_range().start(), - &map, - ); - let mac = mbe::DeclarativeMacro::parse_macro2(&tt, is_2021); - mac - } - None => mbe::DeclarativeMacro::from_err( - mbe::ParseError::Expected("expected a token tree".into()), - is_2021, - ), - }, + + let transparency = |node| { + // ... would be nice to have the item tree here + let attrs = + RawAttrs::new(db, SpanAnchor::DUMMY, node, &Default::default()).filter(db, def_crate); + match &*attrs + .iter() + .find(|it| { + it.path.as_ident().and_then(|it| it.as_str()) == Some("rustc_macro_transparency") + })? + .token_tree_value()? + .token_trees + { + [tt::TokenTree::Leaf(tt::Leaf::Ident(i)), ..] 
=> match &*i.text { + "transparent" => Some(Transparency::Transparent), + "semitransparent" => Some(Transparency::SemiTransparent), + "opaque" => Some(Transparency::Opaque), + _ => None, + }, + _ => None, + } + }; + + let (mac, transparency) = match id.to_ptr(db).to_node(&root) { + ast::Macro::MacroRules(macro_rules) => ( + match macro_rules.token_tree() { + Some(arg) => { + let tt = mbe::syntax_node_to_token_tree( + arg.syntax(), + SpanAnchor { file_id: id.file_id, ast_id: id.value.erase() }, + macro_rules.syntax().text_range().start(), + &map, + ); + let mac = mbe::DeclarativeMacro::parse_macro_rules(&tt, is_2021); + mac + } + None => mbe::DeclarativeMacro::from_err( + mbe::ParseError::Expected("expected a token tree".into()), + is_2021, + ), + }, + transparency(¯o_rules).unwrap_or(Transparency::SemiTransparent), + ), + ast::Macro::MacroDef(macro_def) => ( + match macro_def.body() { + Some(arg) => { + let tt = mbe::syntax_node_to_token_tree( + arg.syntax(), + SpanAnchor { file_id: id.file_id, ast_id: id.value.erase() }, + macro_def.syntax().text_range().start(), + &map, + ); + let mac = mbe::DeclarativeMacro::parse_macro2(&tt, is_2021); + mac + } + None => mbe::DeclarativeMacro::from_err( + mbe::ParseError::Expected("expected a token tree".into()), + is_2021, + ), + }, + transparency(¯o_def).unwrap_or(Transparency::Opaque), + ), }; - Arc::new(DeclarativeMacroExpander { mac }) + Arc::new(DeclarativeMacroExpander { mac, transparency }) } fn macro_expander(db: &dyn ExpandDatabase, id: MacroDefId) -> TokenExpander { @@ -514,12 +576,15 @@ fn macro_expander(db: &dyn ExpandDatabase, id: MacroDefId) -> TokenExpander { } } -fn macro_expand(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult> { +fn macro_expand( + db: &dyn ExpandDatabase, + macro_call_id: MacroCallId, +) -> ExpandResult> { let _p = profile::span("macro_expand"); - let loc = db.lookup_intern_macro_call(id); + let loc = db.lookup_intern_macro_call(macro_call_id); let ExpandResult { value: tt, mut err } = match loc.def.kind { - MacroDefKind::ProcMacro(..) => return db.expand_proc_macro(id), + MacroDefKind::ProcMacro(..) => return db.expand_proc_macro(macro_call_id), MacroDefKind::BuiltInDerive(expander, ..) => { // FIXME: add firewall query for this? let hir_file_id = loc.kind.file_id(); @@ -538,7 +603,7 @@ fn macro_expand(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult map, @@ -554,7 +619,7 @@ fn macro_expand(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult { - let ValueResult { value, err } = db.macro_arg(id); + let ValueResult { value, err } = db.macro_arg(macro_call_id); let Some(macro_arg) = value else { return ExpandResult { value: Arc::new(tt::Subtree { @@ -570,9 +635,11 @@ fn macro_expand(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult { - db.decl_macro_expander(loc.def.krate, id).expand(arg.clone()) + db.decl_macro_expander(loc.def.krate, id).expand(db, arg.clone(), macro_call_id) + } + MacroDefKind::BuiltIn(it, _) => { + it.expand(db, macro_call_id, &arg).map_err(Into::into) } - MacroDefKind::BuiltIn(it, _) => it.expand(db, id, &arg).map_err(Into::into), // This might look a bit odd, but we do not expand the inputs to eager macros here. // Eager macros inputs are expanded, well, eagerly when we collect the macro calls. 
// That kind of expansion uses the ast id map of an eager macros input though which goes through @@ -594,8 +661,10 @@ fn macro_expand(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult it.expand(db, id, &arg).map_err(Into::into), - MacroDefKind::BuiltInAttr(it, _) => it.expand(db, id, &arg), + MacroDefKind::BuiltInEager(it, _) => { + it.expand(db, macro_call_id, &arg).map_err(Into::into) + } + MacroDefKind::BuiltInAttr(it, _) => it.expand(db, macro_call_id, &arg), _ => unreachable!(), } } @@ -653,10 +722,6 @@ fn expand_proc_macro(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult Arc { - Arc::new(HygieneFrame::new(db, file_id)) -} - fn macro_expand_to(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandTo { db.lookup_intern_macro_call(id).expand_to() } diff --git a/crates/hir-expand/src/eager.rs b/crates/hir-expand/src/eager.rs index ae5f26b5d36a2..dc6507a92de54 100644 --- a/crates/hir-expand/src/eager.rs +++ b/crates/hir-expand/src/eager.rs @@ -19,7 +19,7 @@ //! //! See the full discussion : use base_db::{ - span::{SpanAnchor, ROOT_ERASED_FILE_AST_ID}, + span::{SpanAnchor, SyntaxContextId, ROOT_ERASED_FILE_AST_ID}, CrateId, }; use rustc_hash::FxHashMap; @@ -29,7 +29,6 @@ use triomphe::Arc; use crate::{ ast::{self, AstNode}, db::ExpandDatabase, - hygiene::Hygiene, mod_path::ModPath, EagerCallInfo, ExpandError, ExpandResult, ExpandTo, InFile, MacroCallId, MacroCallKind, MacroCallLoc, MacroDefId, MacroDefKind, SpanMap, @@ -56,8 +55,10 @@ pub fn expand_eager_macro_input( krate, eager: None, kind: MacroCallKind::FnLike { ast_id: call_id, expand_to: ExpandTo::Expr }, + // FIXME + call_site: SyntaxContextId::ROOT, }); - let ExpandResult { value: (arg_exp, _arg_exp_map), err: parse_err } = + let ExpandResult { value: (arg_exp, arg_exp_map), err: parse_err } = db.parse_macro_expansion(arg_id.as_macro_file()); // we need this map here as the expansion of the eager input fake file loses whitespace ... // let mut ws_mapping = FxHashMap::default(); @@ -70,7 +71,7 @@ pub fn expand_eager_macro_input( let ExpandResult { value: expanded_eager_input, err } = { eager_macro_recur( db, - &Hygiene::new(db, macro_call.file_id), + &arg_exp_map, InFile::new(arg_id.as_file(), arg_exp.syntax_node()), krate, resolver, @@ -131,6 +132,8 @@ pub fn expand_eager_macro_input( error: err.clone(), })), kind: MacroCallKind::FnLike { ast_id: call_id, expand_to }, + // FIXME + call_site: SyntaxContextId::ROOT, }; ExpandResult { value: Some(db.intern_macro_call(loc)), err } @@ -146,7 +149,13 @@ fn lazy_expand( let expand_to = ExpandTo::from_call_site(¯o_call.value); let ast_id = macro_call.with_value(ast_id); - let id = def.as_lazy_macro(db, krate, MacroCallKind::FnLike { ast_id, expand_to }); + let id = def.as_lazy_macro( + db, + krate, + MacroCallKind::FnLike { ast_id, expand_to }, + // FIXME + SyntaxContextId::ROOT, + ); let macro_file = id.as_macro_file(); db.parse_macro_expansion(macro_file) @@ -155,7 +164,7 @@ fn lazy_expand( fn eager_macro_recur( db: &dyn ExpandDatabase, - hygiene: &Hygiene, + hygiene: &SpanMap, curr: InFile, krate: CrateId, macro_resolver: &dyn Fn(ModPath) -> Option, @@ -250,14 +259,13 @@ fn eager_macro_recur( | MacroDefKind::BuiltInAttr(..) | MacroDefKind::BuiltInDerive(..) | MacroDefKind::ProcMacro(..) 
=> { - let ExpandResult { value: (parse, _tm), err } = + let ExpandResult { value: (parse, tm), err } = lazy_expand(db, &def, curr.with_value(call.clone()), krate); // replace macro inside - let hygiene = Hygiene::new(db, parse.file_id); let ExpandResult { value, err: error } = eager_macro_recur( db, - &hygiene, + &tm, // FIXME: We discard parse errors here parse.as_ref().map(|it| it.syntax_node()), krate, diff --git a/crates/hir-expand/src/hygiene.rs b/crates/hir-expand/src/hygiene.rs index e0688178ffb92..f83a9bf2d657c 100644 --- a/crates/hir-expand/src/hygiene.rs +++ b/crates/hir-expand/src/hygiene.rs @@ -2,32 +2,31 @@ //! //! Specifically, `ast` + `Hygiene` allows you to create a `Name`. Note that, at //! this moment, this is horribly incomplete and handles only `$crate`. -use base_db::{span::SyntaxContextId, CrateId}; -use either::Either; -use syntax::{ - ast::{self}, - TextRange, -}; -use triomphe::Arc; +use base_db::span::{MacroCallId, SyntaxContextId}; -use crate::{ - db::ExpandDatabase, - name::{AsName, Name}, - HirFileId, InFile, -}; +use crate::db::ExpandDatabase; #[derive(Debug, Clone, Hash, PartialEq, Eq)] pub struct SyntaxContextData { - // FIXME: This might only need to be Option? - outer_expn: HirFileId, - outer_transparency: Transparency, - parent: SyntaxContextId, + pub outer_expn: Option, + pub outer_transparency: Transparency, + pub parent: SyntaxContextId, /// This context, but with all transparent and semi-transparent expansions filtered away. - opaque: SyntaxContextId, + pub opaque: SyntaxContextId, /// This context, but with all transparent expansions filtered away. - opaque_and_semitransparent: SyntaxContextId, - /// Name of the crate to which `$crate` with this context would resolve. - dollar_crate_name: Name, + pub opaque_and_semitransparent: SyntaxContextId, +} + +impl SyntaxContextData { + pub fn root() -> Self { + SyntaxContextData { + outer_expn: None, + outer_transparency: Transparency::Opaque, + parent: SyntaxContextId::ROOT, + opaque: SyntaxContextId::ROOT, + opaque_and_semitransparent: SyntaxContextId::ROOT, + } + } } /// A property of a macro expansion that determines how identifiers @@ -50,64 +49,135 @@ pub enum Transparency { } pub(super) fn apply_mark( - _db: &dyn ExpandDatabase, - _ctxt: SyntaxContextData, - _file_id: HirFileId, - _transparency: Transparency, + db: &dyn ExpandDatabase, + ctxt: SyntaxContextId, + call_id: MacroCallId, + transparency: Transparency, ) -> SyntaxContextId { - _db.intern_syntax_context(_ctxt) -} - -// pub(super) fn with_ctxt_from_mark(db: &ExpandDatabase, file_id: HirFileId) { -// self.with_ctxt_from_mark(expn_id, Transparency::Transparent) -// } -// pub(super) fn with_call_site_ctxt(db: &ExpandDatabase, file_id: HirFileId) { -// self.with_ctxt_from_mark(expn_id, Transparency::Transparent) -// } - -#[derive(Clone, Debug)] -pub struct Hygiene {} - -impl Hygiene { - pub fn new(_: &dyn ExpandDatabase, _: HirFileId) -> Hygiene { - Hygiene {} + if transparency == Transparency::Opaque { + return apply_mark_internal(db, ctxt, Some(call_id), transparency); } - pub fn new_unhygienic() -> Hygiene { - Hygiene {} - } + let call_site_ctxt = db.lookup_intern_macro_call(call_id).call_site; + let mut call_site_ctxt = if transparency == Transparency::SemiTransparent { + call_site_ctxt.normalize_to_macros_2_0(db) + } else { + call_site_ctxt.normalize_to_macro_rules(db) + }; - // FIXME: this should just return name - pub fn name_ref_to_name( - &self, - _: &dyn ExpandDatabase, - name_ref: ast::NameRef, - ) -> Either { - 
Either::Left(name_ref.as_name()) + if call_site_ctxt.is_root(db) { + return apply_mark_internal(db, ctxt, Some(call_id), transparency); } - pub fn local_inner_macros(&self, _: &dyn ExpandDatabase, _: ast::Path) -> Option { - None + // Otherwise, `expn_id` is a macros 1.0 definition and the call site is in a + // macros 2.0 expansion, i.e., a macros 1.0 invocation is in a macros 2.0 definition. + // + // In this case, the tokens from the macros 1.0 definition inherit the hygiene + // at their invocation. That is, we pretend that the macros 1.0 definition + // was defined at its invocation (i.e., inside the macros 2.0 definition) + // so that the macros 2.0 definition remains hygienic. + // + // See the example at `test/ui/hygiene/legacy_interaction.rs`. + for (call_id, transparency) in ctxt.marks(db) { + call_site_ctxt = apply_mark_internal(db, call_site_ctxt, call_id, transparency); } + apply_mark_internal(db, call_site_ctxt, Some(call_id), transparency) } -#[derive(Clone, Debug)] -struct HygieneFrames(Arc); +fn apply_mark_internal( + db: &dyn ExpandDatabase, + ctxt: SyntaxContextId, + call_id: Option, + transparency: Transparency, +) -> SyntaxContextId { + let syntax_context_data = db.lookup_intern_syntax_context(ctxt); + let mut opaque = syntax_context_data.opaque; + let mut opaque_and_semitransparent = syntax_context_data.opaque_and_semitransparent; + + if transparency >= Transparency::Opaque { + let parent = opaque; + let new_opaque = SyntaxContextId::SELF_REF; + // But we can't just grab the to be allocated ID either as that would not deduplicate + // things! + // So we need a new salsa store type here ... + opaque = db.intern_syntax_context(SyntaxContextData { + outer_expn: call_id, + outer_transparency: transparency, + parent, + opaque: new_opaque, + opaque_and_semitransparent: new_opaque, + }); + } -#[derive(Clone, Debug, Eq, PartialEq)] -pub struct HygieneFrame {} + if transparency >= Transparency::SemiTransparent { + let parent = opaque_and_semitransparent; + let new_opaque_and_semitransparent = SyntaxContextId::SELF_REF; + opaque_and_semitransparent = db.intern_syntax_context(SyntaxContextData { + outer_expn: call_id, + outer_transparency: transparency, + parent, + opaque, + opaque_and_semitransparent: new_opaque_and_semitransparent, + }); + } -#[derive(Debug, Clone, PartialEq, Eq)] -struct HygieneInfo {} + let parent = ctxt; + db.intern_syntax_context(SyntaxContextData { + outer_expn: call_id, + outer_transparency: transparency, + parent, + opaque, + opaque_and_semitransparent, + }) +} +pub trait SyntaxContextExt { + fn is_root(self, db: &dyn ExpandDatabase) -> bool; + fn normalize_to_macro_rules(self, db: &dyn ExpandDatabase) -> Self; + fn normalize_to_macros_2_0(self, db: &dyn ExpandDatabase) -> Self; + fn parent_ctxt(self, db: &dyn ExpandDatabase) -> Self; + fn outer_mark(self, db: &dyn ExpandDatabase) -> (Option, Transparency); + fn marks(self, db: &dyn ExpandDatabase) -> Vec<(Option, Transparency)>; +} -impl HygieneInfo { - fn _map_ident_up(&self, _: &dyn ExpandDatabase, _: TextRange) -> Option> { - None +#[inline(always)] +fn handle_self_ref(p: SyntaxContextId, n: SyntaxContextId) -> SyntaxContextId { + match n { + SyntaxContextId::SELF_REF => p, + _ => n, } } -impl HygieneFrame { - pub(crate) fn new(_: &dyn ExpandDatabase, _: HirFileId) -> HygieneFrame { - HygieneFrame {} +impl SyntaxContextExt for SyntaxContextId { + fn is_root(self, db: &dyn ExpandDatabase) -> bool { + db.lookup_intern_syntax_context(self).outer_expn.is_none() + } + fn normalize_to_macro_rules(self, 
db: &dyn ExpandDatabase) -> Self { + handle_self_ref(self, db.lookup_intern_syntax_context(self).opaque_and_semitransparent) + } + fn normalize_to_macros_2_0(self, db: &dyn ExpandDatabase) -> Self { + handle_self_ref(self, db.lookup_intern_syntax_context(self).opaque) + } + fn parent_ctxt(self, db: &dyn ExpandDatabase) -> Self { + db.lookup_intern_syntax_context(self).parent + } + fn outer_mark(self, db: &dyn ExpandDatabase) -> (Option, Transparency) { + let data = db.lookup_intern_syntax_context(self); + (data.outer_expn, data.outer_transparency) + } + fn marks(mut self, db: &dyn ExpandDatabase) -> Vec<(Option, Transparency)> { + let mut marks = Vec::new(); + while self != SyntaxContextId::ROOT { + marks.push(self.outer_mark(db)); + self = self.parent_ctxt(db); + } + marks.reverse(); + marks } } + +// pub(super) fn with_ctxt_from_mark(db: &ExpandDatabase, file_id: HirFileId) { +// self.with_ctxt_from_mark(expn_id, Transparency::Transparent) +// } +// pub(super) fn with_call_site_ctxt(db: &ExpandDatabase, file_id: HirFileId) { +// self.with_ctxt_from_mark(expn_id, Transparency::Transparent) +// } diff --git a/crates/hir-expand/src/lib.rs b/crates/hir-expand/src/lib.rs index ae07cf4b151bd..6864f477ae8d2 100644 --- a/crates/hir-expand/src/lib.rs +++ b/crates/hir-expand/src/lib.rs @@ -24,7 +24,10 @@ use triomphe::Arc; use std::{fmt, hash::Hash, iter}; -use base_db::{span::HirFileIdRepr, CrateId, FileId, FileRange, ProcMacroKind}; +use base_db::{ + span::{HirFileIdRepr, SyntaxContextId}, + CrateId, FileId, FileRange, ProcMacroKind, +}; use either::Either; use syntax::{ algo::{self, skip_trivia_token}, @@ -105,6 +108,7 @@ pub struct MacroCallLoc { /// for the eager input macro file. eager: Option>, pub kind: MacroCallKind, + pub call_site: SyntaxContextId, } #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] @@ -330,8 +334,9 @@ impl MacroDefId { db: &dyn db::ExpandDatabase, krate: CrateId, kind: MacroCallKind, + call_site: SyntaxContextId, ) -> MacroCallId { - db.intern_macro_call(MacroCallLoc { def: self, krate, eager: None, kind }) + db.intern_macro_call(MacroCallLoc { def: self, krate, eager: None, kind, call_site }) } pub fn ast_id(&self) -> Either, AstId> { diff --git a/crates/hir-expand/src/mod_path.rs b/crates/hir-expand/src/mod_path.rs index 69aa09c4a5212..9396b08b2800f 100644 --- a/crates/hir-expand/src/mod_path.rs +++ b/crates/hir-expand/src/mod_path.rs @@ -7,11 +7,11 @@ use std::{ use crate::{ db::ExpandDatabase, - hygiene::Hygiene, - name::{known, Name}, + hygiene::{SyntaxContextExt, Transparency}, + name::{known, AsName, Name}, + SpanMap, }; -use base_db::CrateId; -use either::Either; +use base_db::{span::SyntaxContextId, CrateId}; use smallvec::SmallVec; use syntax::{ast, AstNode}; @@ -38,6 +38,7 @@ pub enum PathKind { Crate, /// Absolute path (::foo) Abs, + // FIXME: Remove this /// `$crate` from macro expansion DollarCrate(CrateId), } @@ -46,7 +47,7 @@ impl ModPath { pub fn from_src( db: &dyn ExpandDatabase, path: ast::Path, - hygiene: &Hygiene, + hygiene: &SpanMap, ) -> Option { convert_path(db, None, path, hygiene) } @@ -193,7 +194,7 @@ fn convert_path( db: &dyn ExpandDatabase, prefix: Option, path: ast::Path, - hygiene: &Hygiene, + hygiene: &SpanMap, ) -> Option { let prefix = match path.qualifier() { Some(qual) => Some(convert_path(db, prefix, qual, hygiene)?), @@ -203,23 +204,26 @@ fn convert_path( let segment = path.segment()?; let mut mod_path = match segment.kind()? 
{ ast::PathSegmentKind::Name(name_ref) => { - match hygiene.name_ref_to_name(db, name_ref) { - Either::Left(name) => { - // no type args in use - let mut res = prefix.unwrap_or_else(|| { - ModPath::from_kind( - segment.coloncolon_token().map_or(PathKind::Plain, |_| PathKind::Abs), - ) - }); - res.segments.push(name); - res - } - Either::Right(crate_id) => { - return Some(ModPath::from_segments( - PathKind::DollarCrate(crate_id), - iter::empty(), - )) + if name_ref.text() == "$crate" { + if prefix.is_some() { + return None; } + resolve_crate_root( + db, + hygiene + .span_for_range(name_ref.syntax().text_range()) + .map_or(SyntaxContextId::ROOT, |s| s.ctx), + ) + .map(PathKind::DollarCrate) + .map(ModPath::from_kind)? + } else { + let mut res = prefix.unwrap_or_else(|| { + ModPath::from_kind( + segment.coloncolon_token().map_or(PathKind::Plain, |_| PathKind::Abs), + ) + }); + res.segments.push(name_ref.as_name()); + res } } ast::PathSegmentKind::SelfTypeKw => { @@ -261,8 +265,15 @@ fn convert_path( // We follow what it did anyway :) if mod_path.segments.len() == 1 && mod_path.kind == PathKind::Plain { if let Some(_macro_call) = path.syntax().parent().and_then(ast::MacroCall::cast) { - if let Some(crate_id) = hygiene.local_inner_macros(db, path) { - mod_path.kind = PathKind::DollarCrate(crate_id); + let syn_ctx = hygiene + .span_for_range(segment.syntax().text_range()) + .map_or(SyntaxContextId::ROOT, |s| s.ctx); + if let Some(macro_call_id) = db.lookup_intern_syntax_context(syn_ctx).outer_expn { + if db.lookup_intern_macro_call(macro_call_id).def.local_inner { + if let Some(crate_root) = resolve_crate_root(db, syn_ctx) { + mod_path.kind = PathKind::DollarCrate(crate_root); + } + } } } } @@ -270,6 +281,40 @@ fn convert_path( Some(mod_path) } +pub fn resolve_crate_root(db: &dyn ExpandDatabase, mut ctxt: SyntaxContextId) -> Option { + // When resolving `$crate` from a `macro_rules!` invoked in a `macro`, + // we don't want to pretend that the `macro_rules!` definition is in the `macro` + // as described in `SyntaxContext::apply_mark`, so we ignore prepended opaque marks. + // FIXME: This is only a guess and it doesn't work correctly for `macro_rules!` + // definitions actually produced by `macro` and `macro` definitions produced by + // `macro_rules!`, but at least such configurations are not stable yet. + ctxt = ctxt.normalize_to_macro_rules(db); + let mut iter = ctxt.marks(db).into_iter().rev().peekable(); + let mut result_mark = None; + // Find the last opaque mark from the end if it exists. + while let Some(&(mark, transparency)) = iter.peek() { + if transparency == Transparency::Opaque { + result_mark = Some(mark); + iter.next(); + } else { + break; + } + } + // Then find the last semi-transparent mark from the end if it exists. 
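The two loops above implement the scan those comments describe: walking the marks from the innermost end, a trailing run of opaque marks is consumed first, then a run of semi-transparent ones, and the last mark visited names the macro whose definition site `$crate` should resolve against. Here is a self-contained sketch of the same scan over a plain slice of marks; the id types are stand-ins for the interned ones.

```rust
#[allow(dead_code)]
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
enum Transparency {
    Transparent,
    SemiTransparent,
    Opaque,
}

/// (macro call id, transparency of that expansion); `None` models the root.
type Mark = (Option<u32>, Transparency);

fn find_dollar_crate_mark(marks: &[Mark]) -> Option<u32> {
    let mut iter = marks.iter().rev().peekable();
    let mut result_mark = None;
    // Take the trailing run of opaque marks first...
    while let Some(&&(mark, transparency)) = iter.peek() {
        if transparency == Transparency::Opaque {
            result_mark = Some(mark);
            iter.next();
        } else {
            break;
        }
    }
    // ...then extend through the following semi-transparent run.
    for &(mark, transparency) in iter {
        if transparency == Transparency::SemiTransparent {
            result_mark = Some(mark);
        } else {
            break;
        }
    }
    result_mark.flatten()
}

fn main() {
    // `$crate` most recently emitted by a `macro_rules!` expansion (call 1)
    // nested in a macros-2.0 expansion (call 2): the semi-transparent mark wins.
    let marks = [
        (Some(2), Transparency::Opaque),
        (Some(1), Transparency::SemiTransparent),
    ];
    assert_eq!(find_dollar_crate_mark(&marks), Some(1));

    // A semi-transparent mark just before the trailing opaque run also wins,
    // overwriting the opaque mark taken by the first loop.
    let marks = [
        (Some(1), Transparency::SemiTransparent),
        (Some(2), Transparency::Opaque),
    ];
    assert_eq!(find_dollar_crate_mark(&marks), Some(1));
}
```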
+ for (mark, transparency) in iter { + if transparency == Transparency::SemiTransparent { + result_mark = Some(mark); + } else { + break; + } + } + + match result_mark { + Some(Some(call)) => Some(db.lookup_intern_macro_call(call.into()).def.krate), + Some(None) | None => None, + } +} + pub use crate::name as __name; #[macro_export] diff --git a/crates/hir-expand/src/name.rs b/crates/hir-expand/src/name.rs index a876f48bda4e6..a321f94cd7553 100644 --- a/crates/hir-expand/src/name.rs +++ b/crates/hir-expand/src/name.rs @@ -470,6 +470,7 @@ pub mod known { pub const SELF_TYPE: super::Name = super::Name::new_inline("Self"); pub const STATIC_LIFETIME: super::Name = super::Name::new_inline("'static"); + pub const DOLLAR_CRATE: super::Name = super::Name::new_inline("$crate"); #[macro_export] macro_rules! name { diff --git a/crates/hir-ty/src/display.rs b/crates/hir-ty/src/display.rs index 9ccf467358ece..0712fc1c50c7f 100644 --- a/crates/hir-ty/src/display.rs +++ b/crates/hir-ty/src/display.rs @@ -23,7 +23,7 @@ use hir_def::{ EnumVariantId, HasModule, ItemContainerId, LocalFieldId, Lookup, ModuleDefId, ModuleId, TraitId, }; -use hir_expand::{hygiene::Hygiene, name::Name}; +use hir_expand::name::Name; use intern::{Internable, Interned}; use itertools::Itertools; use la_arena::ArenaMap; @@ -1732,11 +1732,11 @@ impl HirDisplay for TypeRef { f.write_joined(bounds, " + ")?; } TypeRef::Macro(macro_call) => { - let macro_call = macro_call.to_node(f.db.upcast()); let ctx = hir_def::lower::LowerCtx::with_hygiene( f.db.upcast(), - &Hygiene::new_unhygienic(), + f.db.span_map(macro_call.file_id), ); + let macro_call = macro_call.to_node(f.db.upcast()); match macro_call.path() { Some(path) => match Path::from_src(path, &ctx) { Some(path) => path.hir_fmt(f)?, diff --git a/crates/hir-ty/src/test_db.rs b/crates/hir-ty/src/test_db.rs index 7d19e0a19169a..a3383b2b5dfbc 100644 --- a/crates/hir-ty/src/test_db.rs +++ b/crates/hir-ty/src/test_db.rs @@ -7,7 +7,7 @@ use base_db::{ AnchoredPath, CrateId, FileId, FileLoader, FileLoaderDelegate, SourceDatabase, Upcast, }; use hir_def::{db::DefDatabase, ModuleId}; -use hir_expand::db::ExpandDatabase; +use hir_expand::{db::ExpandDatabase, hygiene::SyntaxContextData}; use nohash_hasher::IntMap; use rustc_hash::FxHashSet; use syntax::TextRange; @@ -30,6 +30,7 @@ pub(crate) struct TestDB { impl Default for TestDB { fn default() -> Self { let mut this = Self { storage: Default::default(), events: Default::default() }; + this.intern_syntax_context(SyntaxContextData::root()); this.set_expand_proc_attr_macros_with_durability(true, Durability::HIGH); this } diff --git a/crates/hir/src/attrs.rs b/crates/hir/src/attrs.rs index 796490abd7f23..bb02620208bf1 100644 --- a/crates/hir/src/attrs.rs +++ b/crates/hir/src/attrs.rs @@ -8,7 +8,7 @@ use hir_def::{ resolver::{HasResolver, Resolver, TypeNs}, AssocItemId, AttrDefId, ModuleDefId, }; -use hir_expand::{hygiene::Hygiene, name::Name}; +use hir_expand::name::Name; use hir_ty::db::HirDatabase; use syntax::{ast, AstNode}; @@ -234,7 +234,7 @@ fn modpath_from_str(db: &dyn HirDatabase, link: &str) -> Option { if ast_path.syntax().text() != link { return None; } - ModPath::from_src(db.upcast(), ast_path, &Hygiene::new_unhygienic()) + ModPath::from_src(db.upcast(), ast_path, &Default::default()) }; let full = try_get_modpath(link); diff --git a/crates/hir/src/db.rs b/crates/hir/src/db.rs index f4129e736ee13..cc3e869aa7ce5 100644 --- a/crates/hir/src/db.rs +++ b/crates/hir/src/db.rs @@ -6,7 +6,7 @@ pub use hir_def::db::*; pub use hir_expand::db::{ 
AstIdMapQuery, DeclMacroExpanderQuery, ExpandDatabase, ExpandDatabaseStorage, - ExpandProcMacroQuery, HygieneFrameQuery, InternMacroCallQuery, MacroArgQuery, MacroExpandQuery, + ExpandProcMacroQuery, InternMacroCallQuery, MacroArgQuery, MacroExpandQuery, ParseMacroExpansionErrorQuery, ParseMacroExpansionQuery, }; pub use hir_ty::db::*; diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs index 50d88b4cf8389..b224b4b3a97c9 100644 --- a/crates/hir/src/lib.rs +++ b/crates/hir/src/lib.rs @@ -137,10 +137,7 @@ pub use { // These are negative re-exports: pub using these names is forbidden, they // should remain private to hir internals. #[allow(unused)] -use { - hir_def::path::Path, - hir_expand::{hygiene::Hygiene, name::AsName}, -}; +use {hir_def::path::Path, hir_expand::name::AsName}; /// hir::Crate describes a single crate. It's the main interface with which /// a crate's dependencies interact. Mostly, it should be just a proxy for the diff --git a/crates/hir/src/semantics.rs b/crates/hir/src/semantics.rs index 758e6118aa123..825d68cdbaed6 100644 --- a/crates/hir/src/semantics.rs +++ b/crates/hir/src/semantics.rs @@ -842,8 +842,8 @@ impl<'db> SemanticsImpl<'db> { pub fn resolve_trait(&self, path: &ast::Path) -> Option { let analyze = self.analyze(path.syntax())?; - let hygiene = hir_expand::hygiene::Hygiene::new(self.db.upcast(), analyze.file_id); - let ctx = LowerCtx::with_hygiene(self.db.upcast(), &hygiene); + let hygiene = self.db.span_map(analyze.file_id); + let ctx = LowerCtx::with_hygiene(self.db.upcast(), hygiene); let hir_path = Path::from_src(path.clone(), &ctx)?; match analyze.resolver.resolve_path_in_type_ns_fully(self.db.upcast(), &hir_path)? { TypeNs::TraitId(id) => Some(Trait { id }), diff --git a/crates/hir/src/source_analyzer.rs b/crates/hir/src/source_analyzer.rs index 8fc7f2c05d745..aaad82e128763 100644 --- a/crates/hir/src/source_analyzer.rs +++ b/crates/hir/src/source_analyzer.rs @@ -26,7 +26,6 @@ use hir_def::{ }; use hir_expand::{ builtin_fn_macro::BuiltinFnLikeExpander, - hygiene::Hygiene, mod_path::path, name, name::{AsName, Name}, @@ -596,8 +595,7 @@ impl SourceAnalyzer { } // This must be a normal source file rather than macro file. - let hygiene = Hygiene::new(db.upcast(), self.file_id); - let ctx = LowerCtx::with_hygiene(db.upcast(), &hygiene); + let ctx = LowerCtx::with_hygiene(db.upcast(), db.span_map(self.file_id)); let hir_path = Path::from_src(path.clone(), &ctx)?; // Case where path is a qualifier of a use tree, e.g. 
foo::bar::{Baz, Qux} where we are diff --git a/crates/ide-db/src/apply_change.rs index 65eaa6510f0ed..dc77424c13144 100644 --- a/crates/ide-db/src/apply_change.rs +++ b/crates/ide-db/src/apply_change.rs @@ -103,7 +103,6 @@ impl RootDatabase { hir::db::DeclMacroExpanderQuery hir::db::MacroExpandQuery hir::db::ExpandProcMacroQuery - hir::db::HygieneFrameQuery // DefDatabase hir::db::FileItemTreeQuery diff --git a/crates/ide-db/src/lib.rs b/crates/ide-db/src/lib.rs index 258d893b47da0..957c9ad26ca20 100644 --- a/crates/ide-db/src/lib.rs +++ b/crates/ide-db/src/lib.rs @@ -208,7 +208,6 @@ impl RootDatabase { hir_db::DeclMacroExpanderQuery // hir_db::MacroExpandQuery hir_db::ExpandProcMacroQuery - hir_db::HygieneFrameQuery hir_db::ParseMacroExpansionErrorQuery // DefDatabase diff --git a/crates/mbe/src/benchmark.rs b/crates/mbe/src/benchmark.rs index 2ead71645639a..eb0773570b325 100644 --- a/crates/mbe/src/benchmark.rs +++ b/crates/mbe/src/benchmark.rs @@ -53,7 +53,7 @@ fn benchmark_expand_macro_rules() { invocations .into_iter() .map(|(id, tt)| { - let res = rules[&id].expand(&tt); + let res = rules[&id].expand(&tt, |_| ()); assert!(res.err.is_none()); res.value.token_trees.len() }) @@ -124,7 +124,7 @@ fn invocation_fixtures( for op in rule.lhs.iter() { collect_from_op(op, &mut subtree, &mut seed); } - if it.expand(&subtree).err.is_none() { + if it.expand(&subtree, |_| ()).err.is_none() { res.push((name.clone(), subtree)); break; } diff --git a/crates/mbe/src/expander.rs b/crates/mbe/src/expander.rs index fac2b33758108..487e8b35980db 100644 --- a/crates/mbe/src/expander.rs +++ b/crates/mbe/src/expander.rs @@ -14,6 +14,7 @@ use crate::{parser::MetaVarKind, ExpandError, ExpandResult}; pub(crate) fn expand_rules( rules: &[crate::Rule], input: &tt::Subtree, + marker: impl Fn(&mut S) + Copy, is_2021: bool, ) -> ExpandResult> { let mut match_: Option<(matcher::Match, &crate::Rule)> = None; @@ -25,7 +26,7 @@ pub(crate) fn expand_rules( // Unconditionally returning the transcription here makes the // `test_repeat_bad_var` test fail. let ExpandResult { value, err: transcribe_err } = - transcriber::transcribe(&rule.rhs, &new_match.bindings); + transcriber::transcribe(&rule.rhs, &new_match.bindings, marker); if transcribe_err.is_none() { return ExpandResult::ok(value); } @@ -44,7 +45,7 @@ pub(crate) fn expand_rules( if let Some((match_, rule)) = match_ { // if we got here, there was no match without errors let ExpandResult { value, err: transcribe_err } = - transcriber::transcribe(&rule.rhs, &match_.bindings); + transcriber::transcribe(&rule.rhs, &match_.bindings, marker); ExpandResult { value, err: match_.err.or(transcribe_err) } } else { ExpandResult::new( @@ -129,7 +130,7 @@ enum Fragment { /// At one point in time, we tried to use "fake" delimiters here à la /// proc-macro delimiter=none. As we later discovered, "none" delimiters are /// tricky to handle in the parser, and rustc doesn't handle those either. - Expr(tt::TokenTree), + Expr(tt::Subtree), /// There are roughly two types of paths: paths in expression context, where a /// separator `::` between an identifier and its following generic argument list /// is mandatory, and paths in type context, where `::` can be omitted. /// /// Unlike rustc, we need to transform the parsed fragments back into tokens /// during transcription. When the matched path fragment is a type-context path /// and is transcribed as an expression-context path, verbatim transcription /// would cause a syntax error. We need to fix it up just before transcribing; /// see `transcriber::fix_up_and_push_path_tt()`. 
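/// A hypothetical illustration of the failure mode (this macro is made up
/// here and is not part of the patch):
///
/// ```ignore
/// macro_rules! m { ($p:path) => { $p() }; }
/// m!(Vec<u8>::new); // `path` fragments accept the type-context form
/// ```
///
/// Transcribing `Vec<u8>::new()` verbatim into expression position would
/// parse the `<` as a comparison operator; the fixup rewrites the captured
/// path to `Vec::<u8>::new()` before pushing it.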
- Path(tt::TokenTree), + Path(tt::Subtree), } diff --git a/crates/mbe/src/expander/matcher.rs b/crates/mbe/src/expander/matcher.rs index 796c9f2eb3211..14b32599091e4 100644 --- a/crates/mbe/src/expander/matcher.rs +++ b/crates/mbe/src/expander/matcher.rs @@ -765,7 +765,7 @@ fn match_meta_var( MetaVarKind::Path => { return input .expect_fragment(parser::PrefixEntryPoint::Path) - .map(|it| it.map(Fragment::Path)); + .map(|it| it.map(tt::TokenTree::subtree_or_wrap).map(Fragment::Path)); } MetaVarKind::Ty => parser::PrefixEntryPoint::Ty, MetaVarKind::Pat if is_2021 => parser::PrefixEntryPoint::PatTop, @@ -793,7 +793,7 @@ fn match_meta_var( }; return input .expect_fragment(parser::PrefixEntryPoint::Expr) - .map(|tt| tt.map(Fragment::Expr)); + .map(|tt| tt.map(tt::TokenTree::subtree_or_wrap).map(Fragment::Expr)); } MetaVarKind::Ident | MetaVarKind::Tt | MetaVarKind::Lifetime | MetaVarKind::Literal => { let tt_result = match kind { diff --git a/crates/mbe/src/expander/transcriber.rs b/crates/mbe/src/expander/transcriber.rs index 4f5cd0480c78d..c8b326fa6c535 100644 --- a/crates/mbe/src/expander/transcriber.rs +++ b/crates/mbe/src/expander/transcriber.rs @@ -11,21 +11,19 @@ use crate::{ }; impl Bindings { - fn contains(&self, name: &str) -> bool { - self.inner.contains_key(name) - } - fn get(&self, name: &str) -> Result<&Binding, ExpandError> { match self.inner.get(name) { Some(binding) => Ok(binding), - None => Err(ExpandError::binding_error(format!("could not find binding `{name}`"))), + None => Err(ExpandError::UnresolvedBinding(Box::new(Box::from(name)))), } } fn get_fragment( &self, name: &str, + mut span: S, nesting: &mut [NestingState], + marker: impl Fn(&mut S), ) -> Result, ExpandError> { macro_rules! binding_err { ($($arg:tt)*) => { ExpandError::binding_error(format!($($arg)*)) }; @@ -48,54 +46,75 @@ impl Bindings { }; } match b { - Binding::Fragment(it) => Ok(it.clone()), - // emit some reasonable default expansion for missing bindings, - // this gives better recovery than emitting the `$fragment-name` verbatim - Binding::Missing(it) => Ok(match it { - MetaVarKind::Stmt => { - Fragment::Tokens(tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct { - span: S::DUMMY, - char: ';', - spacing: tt::Spacing::Alone, - }))) - } - MetaVarKind::Block => Fragment::Tokens(tt::TokenTree::Subtree(tt::Subtree { + Binding::Fragment(f @ (Fragment::Path(sub) | Fragment::Expr(sub))) => { + let tt::Subtree { delimiter, token_trees } = sub; + marker(&mut span); + let subtree = tt::Subtree { delimiter: tt::Delimiter { - open: S::DUMMY, - close: S::DUMMY, - kind: tt::DelimiterKind::Brace, + // TODO split span + open: span, + close: span, + kind: delimiter.kind, }, - token_trees: vec![], - })), - // FIXME: Meta and Item should get proper defaults - MetaVarKind::Meta | MetaVarKind::Item | MetaVarKind::Tt | MetaVarKind::Vis => { - Fragment::Tokens(tt::TokenTree::Subtree(tt::Subtree { - delimiter: tt::Delimiter::UNSPECIFIED, + token_trees: token_trees.clone(), + }; + Ok(match f { + Fragment::Tokens(_) => unreachable!(), + Fragment::Expr(_) => Fragment::Expr, + Fragment::Path(_) => Fragment::Path, + }(subtree)) + } + Binding::Fragment(it @ Fragment::Tokens(_)) => Ok(it.clone()), + // emit some reasonable default expansion for missing bindings, + // this gives better recovery than emitting the `$fragment-name` verbatim + Binding::Missing(it) => Ok({ + marker(&mut span); + match it { + MetaVarKind::Stmt => { + Fragment::Tokens(tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct { + span, + char: ';', + spacing: tt::Spacing::Alone, 
+ }))) + } + MetaVarKind::Block => Fragment::Tokens(tt::TokenTree::Subtree(tt::Subtree { + delimiter: tt::Delimiter { + open: S::DUMMY, + close: S::DUMMY, + kind: tt::DelimiterKind::Brace, + }, token_trees: vec![], - })) - } - MetaVarKind::Path - | MetaVarKind::Ty - | MetaVarKind::Pat - | MetaVarKind::PatParam - | MetaVarKind::Expr - | MetaVarKind::Ident => { - Fragment::Tokens(tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident { - text: SmolStr::new_inline("missing"), - span: S::DUMMY, - }))) - } - MetaVarKind::Lifetime => { - Fragment::Tokens(tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident { - text: SmolStr::new_inline("'missing"), - span: S::DUMMY, - }))) - } - MetaVarKind::Literal => { - Fragment::Tokens(tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident { - text: SmolStr::new_inline("\"missing\""), - span: S::DUMMY, - }))) + })), + // FIXME: Meta and Item should get proper defaults + MetaVarKind::Meta | MetaVarKind::Item | MetaVarKind::Tt | MetaVarKind::Vis => { + Fragment::Tokens(tt::TokenTree::Subtree(tt::Subtree { + delimiter: tt::Delimiter::UNSPECIFIED, + token_trees: vec![], + })) + } + MetaVarKind::Path + | MetaVarKind::Ty + | MetaVarKind::Pat + | MetaVarKind::PatParam + | MetaVarKind::Expr + | MetaVarKind::Ident => { + Fragment::Tokens(tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident { + text: SmolStr::new_inline("missing"), + span, + }))) + } + MetaVarKind::Lifetime => { + Fragment::Tokens(tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident { + text: SmolStr::new_inline("'missing"), + span, + }))) + } + MetaVarKind::Literal => { + Fragment::Tokens(tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident { + text: SmolStr::new_inline("\"missing\""), + span, + }))) + } } }), Binding::Nested(_) => { @@ -111,10 +130,11 @@ impl Bindings { pub(super) fn transcribe( template: &MetaTemplate, bindings: &Bindings, + marker: impl Fn(&mut S) + Copy, ) -> ExpandResult> { let mut ctx = ExpandCtx { bindings, nesting: Vec::new() }; let mut arena: Vec> = Vec::new(); - expand_subtree(&mut ctx, template, None, &mut arena) + expand_subtree(&mut ctx, template, None, &mut arena, marker) } #[derive(Debug)] @@ -139,40 +159,65 @@ fn expand_subtree( template: &MetaTemplate, delimiter: Option>, arena: &mut Vec>, + marker: impl Fn(&mut S) + Copy, ) -> ExpandResult> { // remember how many elements are in the arena now - when returning, we want to drain exactly how many elements we added. This way, the recursive uses of the arena get their own "view" of the arena, but will reuse the allocation let start_elements = arena.len(); let mut err = None; 'ops: for op in template.iter() { match op { - Op::Literal(it) => arena.push(tt::Leaf::from(it.clone()).into()), - Op::Ident(it) => arena.push(tt::Leaf::from(it.clone()).into()), + Op::Literal(it) => arena.push( + tt::Leaf::from({ + let mut it = it.clone(); + marker(&mut it.span); + it + }) + .into(), + ), + Op::Ident(it) => arena.push( + tt::Leaf::from({ + let mut it = it.clone(); + marker(&mut it.span); + it + }) + .into(), + ), Op::Punct(puncts) => { for punct in puncts { - arena.push(tt::Leaf::from(*punct).into()); + arena.push( + tt::Leaf::from({ + let mut it = punct.clone(); + marker(&mut it.span); + it + }) + .into(), + ); } } Op::Subtree { tokens, delimiter } => { + let mut delimiter = *delimiter; + marker(&mut delimiter.open); + marker(&mut delimiter.close); let ExpandResult { value: tt, err: e } = - expand_subtree(ctx, tokens, Some(*delimiter), arena); + expand_subtree(ctx, tokens, Some(delimiter), arena, marker); err = err.or(e); arena.push(tt.into()); } Op::Var { name, id, .. 
} => { - let ExpandResult { value: fragment, err: e } = expand_var(ctx, name, *id); + let ExpandResult { value: fragment, err: e } = expand_var(ctx, name, *id, marker); err = err.or(e); push_fragment(arena, fragment); } Op::Repeat { tokens: subtree, kind, separator } => { let ExpandResult { value: fragment, err: e } = - expand_repeat(ctx, subtree, *kind, separator, arena); + expand_repeat(ctx, subtree, *kind, separator, arena, marker); err = err.or(e); push_fragment(arena, fragment) } Op::Ignore { name, id } => { // Expand the variable, but ignore the result. This registers the repetition count. // FIXME: Any emitted errors are dropped. - expand_var(ctx, name, *id); + expand_var(ctx, name, *id, marker); } Op::Index { depth } => { let index = @@ -258,42 +303,42 @@ fn expand_var( ctx: &mut ExpandCtx<'_, S>, v: &SmolStr, id: S, + marker: impl Fn(&mut S), ) -> ExpandResult> { // We already handle $crate case in mbe parser debug_assert!(v != "crate"); - if !ctx.bindings.contains(v) { - // Note that it is possible to have a `$var` inside a macro which is not bound. - // For example: - // ``` - // macro_rules! foo { - // ($a:ident, $b:ident, $c:tt) => { - // macro_rules! bar { - // ($bi:ident) => { - // fn $bi() -> u8 {$c} - // } - // } - // } - // ``` - // We just treat it a normal tokens - let tt = tt::Subtree { - delimiter: tt::Delimiter::UNSPECIFIED, - token_trees: vec![ - tt::Leaf::from(tt::Punct { char: '$', spacing: tt::Spacing::Alone, span: id }) - .into(), - tt::Leaf::from(tt::Ident { text: v.clone(), span: id }).into(), - ], + match ctx.bindings.get_fragment(v, id, &mut ctx.nesting, marker) { + Ok(it) => ExpandResult::ok(it), + Err(ExpandError::UnresolvedBinding(_)) => { + // Note that it is possible to have a `$var` inside a macro which is not bound. + // For example: + // ``` + // macro_rules! foo { + // ($a:ident, $b:ident, $c:tt) => { + // macro_rules! 
bar { + // ($bi:ident) => { + // fn $bi() -> u8 {$c} + // } + // } + // } + // ``` + // We just treat it as normal tokens + let tt = tt::Subtree { + delimiter: tt::Delimiter::UNSPECIFIED, + token_trees: vec![ + tt::Leaf::from(tt::Punct { char: '$', spacing: tt::Spacing::Alone, span: id }) + .into(), + tt::Leaf::from(tt::Ident { text: v.clone(), span: id }).into(), + ], + } + .into(); + ExpandResult::ok(Fragment::Tokens(tt)) } - .into(); - ExpandResult::ok(Fragment::Tokens(tt)) - } else { - ctx.bindings.get_fragment(v, &mut ctx.nesting).map_or_else( - |e| ExpandResult { - value: Fragment::Tokens(tt::TokenTree::Subtree(tt::Subtree::empty())), - err: Some(e), - }, - ExpandResult::ok, - ) + Err(e) => ExpandResult { + value: Fragment::Tokens(tt::TokenTree::Subtree(tt::Subtree::empty())), + err: Some(e), + }, } } @@ -303,6 +348,7 @@ fn expand_repeat( kind: RepeatKind, separator: &Option>, arena: &mut Vec>, + marker: impl Fn(&mut S) + Copy, ) -> ExpandResult> { let mut buf: Vec> = Vec::new(); ctx.nesting.push(NestingState { idx: 0, at_end: false, hit: false }); @@ -314,7 +360,8 @@ fn expand_repeat( let mut err = None; loop { - let ExpandResult { value: mut t, err: e } = expand_subtree(ctx, template, None, arena); + let ExpandResult { value: mut t, err: e } = + expand_subtree(ctx, template, None, arena, marker); let nesting_state = ctx.nesting.last_mut().unwrap(); if nesting_state.at_end || !nesting_state.hit { break; } @@ -391,7 +438,7 @@ fn expand_repeat( fn push_fragment(buf: &mut Vec>, fragment: Fragment) { match fragment { Fragment::Tokens(tt::TokenTree::Subtree(tt)) => push_subtree(buf, tt), - Fragment::Expr(tt::TokenTree::Subtree(mut tt)) => { + Fragment::Expr(mut tt) => { if tt.delimiter.kind == tt::DelimiterKind::Invisible { tt.delimiter = tt::Delimiter { open: S::DUMMY, @@ -401,8 +448,8 @@ fn push_fragment(buf: &mut Vec>, fragment: Fragment } buf.push(tt.into()) } - Fragment::Path(tt::TokenTree::Subtree(tt)) => fix_up_and_push_path_tt(buf, tt), - Fragment::Tokens(tt) | Fragment::Expr(tt) | Fragment::Path(tt) => buf.push(tt), + Fragment::Path(tt) => fix_up_and_push_path_tt(buf, tt), + Fragment::Tokens(tt) => buf.push(tt), } } diff --git a/crates/mbe/src/lib.rs b/crates/mbe/src/lib.rs index 43543479eb94a..482d0157b2dbd 100644 --- a/crates/mbe/src/lib.rs +++ b/crates/mbe/src/lib.rs @@ -72,6 +72,7 @@ impl fmt::Display for ParseError { #[derive(Debug, PartialEq, Eq, Clone, Hash)] pub enum ExpandError { BindingError(Box>), + UnresolvedBinding(Box>), LeftoverTokens, ConversionError, LimitExceeded, @@ -94,6 +95,10 @@ impl fmt::Display for ExpandError { ExpandError::NoMatchingRule => f.write_str("no rule matches input tokens"), ExpandError::UnexpectedToken => f.write_str("unexpected token in input"), ExpandError::BindingError(e) => f.write_str(e), + ExpandError::UnresolvedBinding(binding) => { + f.write_str("could not find binding ")?; + f.write_str(binding) + } ExpandError::ConversionError => f.write_str("could not convert tokens"), ExpandError::LimitExceeded => f.write_str("Expand exceed limit"), ExpandError::LeftoverTokens => f.write_str("leftover tokens"), @@ -233,8 +238,12 @@ impl DeclarativeMacro { self.err.as_deref() } - pub fn expand(&self, tt: &tt::Subtree) -> ExpandResult> { - expander::expand_rules(&self.rules, &tt, self.is_2021) + pub fn expand( + &self, + tt: &tt::Subtree, + marker: impl Fn(&mut S) + Copy, + ) -> ExpandResult> { + expander::expand_rules(&self.rules, &tt, marker, self.is_2021) } } diff --git a/crates/mbe/src/syntax_bridge.rs b/crates/mbe/src/syntax_bridge.rs index 
ab272862cdb50..b843db510e39c 100644 --- a/crates/mbe/src/syntax_bridge.rs +++ b/crates/mbe/src/syntax_bridge.rs @@ -155,10 +155,7 @@ pub fn parse_exprs_with_sep(tt: &tt::Subtree, sep: char) -> Vec break, - Some(tt @ tt::TokenTree::Leaf(_)) => { - tt::Subtree { delimiter: tt::Delimiter::unspecified(), token_trees: vec![tt] } - } - Some(tt::TokenTree::Subtree(tt)) => tt, + Some(tt) => tt.subtree_or_wrap(), }); let mut fork = iter.clone(); @@ -720,6 +717,7 @@ where /// Parses a float literal as if it was a one to two name ref nodes with a dot inbetween. /// This occurs when a float literal is used as a field access. fn float_split(&mut self, has_pseudo_dot: bool) { + // TODO: FIXME this breaks the hygiene map let (text, _span) = match self.cursor.token_tree() { Some(tt::buffer::TokenTreeRef::Leaf(tt::Leaf::Literal(lit), _)) => { (lit.text.as_str(), lit.span) diff --git a/crates/mbe/src/token_map.rs b/crates/mbe/src/token_map.rs index 1af50c8b3b950..c825bd01bcd63 100644 --- a/crates/mbe/src/token_map.rs +++ b/crates/mbe/src/token_map.rs @@ -20,11 +20,12 @@ pub struct TokenMap { // then a bin search on the ast id pub span_map: Vec<(TextRange, S)>, // span_map2: rustc_hash::FxHashMap, + pub real_file: bool, } impl Default for TokenMap { fn default() -> Self { - Self { span_map: Vec::new() } + Self { span_map: Vec::new(), real_file: true } } } @@ -49,8 +50,21 @@ impl TokenMap { ) } + // FIXME: Should be infallible pub fn span_for_range(&self, range: TextRange) -> Option { - self.span_map.iter().find_map(|(r, s)| if r == &range { Some(s.clone()) } else { None }) + // TODO FIXME: make this proper + self.span_map + .iter() + .filter_map(|(r, s)| Some((r, s, r.intersect(range)?))) + .max_by_key(|(_, _, intersection)| intersection.len()) + .map(|(_, &s, _)| s) + .or_else(|| { + if self.real_file { + None + } else { + panic!("no span for range {range:?} in {:#?}", self.span_map) + } + }) } // pub fn ranges_by_token( diff --git a/crates/tt/src/lib.rs b/crates/tt/src/lib.rs index a384af2a9a33d..73e75a051bd66 100644 --- a/crates/tt/src/lib.rs +++ b/crates/tt/src/lib.rs @@ -56,6 +56,7 @@ pub trait SpanAnchor: std::fmt::Debug + Copy + Sized + Eq { const DUMMY: Self; } +// FIXME: Get rid of this trait? 
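// A minimal sketch of an implementor, assuming nothing beyond the bounds
// spelled out below plus the `DUMMY` placeholder constant is needed (the
// type name `TestSpan` is made up; compare the `DummyFile`/`DummyCtx` test
// impls for the related `SpanAnchor`/`SyntaxContext` traits used elsewhere
// in this series):
//
// #[derive(Debug, Clone, Copy, PartialEq, Eq)]
// struct TestSpan(u32);
// impl Span for TestSpan {
//     const DUMMY: Self = TestSpan(0);
// }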
pub trait Span: std::fmt::Debug + Copy + Sized + Eq { const DUMMY: Self; } @@ -72,7 +73,16 @@ pub enum TokenTree { impl_from!(Leaf, Subtree for TokenTree); impl TokenTree { pub const fn empty() -> Self { - Self::Subtree(Subtree { delimiter: Delimiter::unspecified(), token_trees: vec![] }) + Self::Subtree(Subtree { delimiter: Delimiter::UNSPECIFIED, token_trees: vec![] }) + } + + pub fn subtree_or_wrap(self) -> Subtree { + match self { + TokenTree::Leaf(_) => { + Subtree { delimiter: Delimiter::UNSPECIFIED, token_trees: vec![self] } + } + TokenTree::Subtree(s) => s, + } } } From 8423893d1ce33659adc42e26850069a8e7087c30 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Sun, 19 Nov 2023 12:27:22 +0100 Subject: [PATCH 07/80] More incremental tests --- .../hir-def/src/nameres/tests/incremental.rs | 226 ++++++++++++++---- 1 file changed, 179 insertions(+), 47 deletions(-) diff --git a/crates/hir-def/src/nameres/tests/incremental.rs b/crates/hir-def/src/nameres/tests/incremental.rs index 977745c476f15..3d7784bdd5428 100644 --- a/crates/hir-def/src/nameres/tests/incremental.rs +++ b/crates/hir-def/src/nameres/tests/incremental.rs @@ -1,13 +1,18 @@ -use base_db::SourceDatabaseExt; +use base_db::{SourceDatabase, SourceDatabaseExt}; use triomphe::Arc; -use crate::{db::DefDatabase, AdtId, ModuleDefId}; - -use super::*; +use crate::{ + db::DefDatabase, + nameres::tests::{TestDB, WithFixture}, + AdtId, ModuleDefId, +}; fn check_def_map_is_not_recomputed(ra_fixture_initial: &str, ra_fixture_change: &str) { let (mut db, pos) = TestDB::with_position(ra_fixture_initial); - let krate = db.test_crate(); + let krate = { + let crate_graph = db.crate_graph(); + crate_graph.iter().last().unwrap() + }; { let events = db.log_executed(|| { db.crate_def_map(krate); @@ -28,39 +33,39 @@ fn check_def_map_is_not_recomputed(ra_fixture_initial: &str, ra_fixture_change: fn typing_inside_a_function_should_not_invalidate_def_map() { check_def_map_is_not_recomputed( r" - //- /lib.rs - mod foo;$0 +//- /lib.rs +mod foo;$0 - use crate::foo::bar::Baz; +use crate::foo::bar::Baz; - enum E { A, B } - use E::*; +enum E { A, B } +use E::*; - fn foo() -> i32 { - 1 + 1 - } +fn foo() -> i32 { + 1 + 1 +} - #[cfg(never)] - fn no() {} - //- /foo/mod.rs - pub mod bar; +#[cfg(never)] +fn no() {} +//- /foo/mod.rs +pub mod bar; - //- /foo/bar.rs - pub struct Baz; - ", +//- /foo/bar.rs +pub struct Baz; +", r" - mod foo; +mod foo; - use crate::foo::bar::Baz; +use crate::foo::bar::Baz; - enum E { A, B } - use E::*; +enum E { A, B } +use E::*; - fn foo() -> i32 { 92 } +fn foo() -> i32 { 92 } - #[cfg(never)] - fn no() {} - ", +#[cfg(never)] +fn no() {} +", ); } @@ -68,30 +73,157 @@ fn typing_inside_a_function_should_not_invalidate_def_map() { fn typing_inside_a_macro_should_not_invalidate_def_map() { check_def_map_is_not_recomputed( r" - //- /lib.rs - macro_rules! m { - ($ident:ident) => { - fn f() { - $ident + $ident; - }; - } - } - mod foo; +//- /lib.rs +macro_rules! 
m { + ($ident:ident) => { + fn f() { + $ident + $ident; + }; + } +} +mod foo; + +//- /foo/mod.rs +pub mod bar; + +//- /foo/bar.rs +$0 +m!(X); + +pub struct S {} +", + r" +m!(Y); + +pub struct S {} +", + ); +} - //- /foo/mod.rs - pub mod bar; +#[test] +fn typing_inside_an_attribute_should_not_invalidate_def_map() { + check_def_map_is_not_recomputed( + r" +//- proc_macros: identity +//- /lib.rs +mod foo; - //- /foo/bar.rs - $0 - m!(X); +//- /foo/mod.rs +pub mod bar; - pub struct S {} - ", +//- /foo/bar.rs +$0 +#[proc_macros::identity] +fn f() {} +", r" - m!(Y); +#[proc_macros::identity] +fn f() { foo } +", + ); +} + +#[test] +fn typing_inside_an_attribute_arg_should_not_invalidate_def_map() { + check_def_map_is_not_recomputed( + r" +//- proc_macros: identity +//- /lib.rs +mod foo; + +//- /foo/mod.rs +pub mod bar; - pub struct S {} - ", +//- /foo/bar.rs +$0 +#[proc_macros::identity] +fn f() {} +", + r" +#[proc_macros::identity(foo)] +fn f() {} +", + ); +} +#[test] +fn typing_inside_macro_heavy_file_should_not_invalidate_def_map() { + check_def_map_is_not_recomputed( + r" +//- proc_macros: identity, derive_identity +//- /lib.rs +macro_rules! m { + ($ident:ident) => { + fn fm() { + $ident + $ident; + }; + } +} +mod foo; + +//- /foo/mod.rs +pub mod bar; + +//- /foo/bar.rs +$0 +fn f() {} + +m!(X); +macro_rules! m2 { + ($ident:ident) => { + fn f2() { + $ident + $ident; + }; + } +} +m2!(X); + +#[proc_macros::identity] +#[derive(proc_macros::DeriveIdentity)] +pub struct S {} +", + r" +fn f() {0} + +m!(X); +macro_rules! m2 { + ($ident:ident) => { + fn f2() { + $ident + $ident; + }; + } +} +m2!(X); + +#[proc_macros::identity] +#[derive(proc_macros::DeriveIdentity)] +pub struct S {} +", + ); +} + +#[test] +fn typing_inside_a_derive_should_not_invalidate_def_map() { + check_def_map_is_not_recomputed( + r" +//- proc_macros: derive_identity +//- minicore:derive +//- /lib.rs +mod foo; + +//- /foo/mod.rs +pub mod bar; + +//- /foo/bar.rs +$0 +#[derive(proc_macros::DeriveIdentity)] +#[allow()] +struct S; +", + r" +#[derive(proc_macros::DeriveIdentity)] +#[allow(dead_code)] +struct S; +", ); } From 394d11b0fa2cb40c92f5cfa7e664d5fc425073aa Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Sun, 19 Nov 2023 18:42:25 +0100 Subject: [PATCH 08/80] Fix float-split hack not setting up spans correctly --- crates/base-db/src/span.rs | 6 ++ crates/hir-def/src/lib.rs | 7 +-- .../hir-def/src/macro_expansion_tests/mbe.rs | 57 +++++++++++++----- .../hir-def/src/macro_expansion_tests/mod.rs | 30 +++++++--- .../src/macro_expansion_tests/proc_macros.rs | 59 +++++++++++++++++++ crates/hir-def/src/nameres/collector.rs | 13 ++-- crates/hir-def/src/nameres/tests.rs | 4 +- crates/hir-def/src/test_db.rs | 4 +- crates/hir-expand/src/attrs.rs | 30 ++++++++-- crates/hir-expand/src/db.rs | 8 ++- crates/hir-ty/src/test_db.rs | 4 +- crates/hir-ty/src/tests/macros.rs | 12 ++-- crates/ide-db/src/lib.rs | 1 + crates/mbe/src/syntax_bridge.rs | 12 +++- crates/mbe/src/token_map.rs | 8 +-- 15 files changed, 197 insertions(+), 58 deletions(-) diff --git a/crates/base-db/src/span.rs b/crates/base-db/src/span.rs index f430a36ddfb49..600aa079333eb 100644 --- a/crates/base-db/src/span.rs +++ b/crates/base-db/src/span.rs @@ -16,6 +16,12 @@ pub type SpanData = tt::SpanData; pub struct SyntaxContextId(InternId); crate::impl_intern_key!(SyntaxContextId); +impl fmt::Display for SyntaxContextId { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "{}", self.0.as_u32()) + } +} + impl SyntaxContext for SyntaxContextId { const DUMMY: Self 
= Self::ROOT; // veykril(HACK): salsa doesn't allow us fetching the id of the current input to be allocated so diff --git a/crates/hir-def/src/lib.rs b/crates/hir-def/src/lib.rs index 0da605819f9e6..65f1dcc850198 100644 --- a/crates/hir-def/src/lib.rs +++ b/crates/hir-def/src/lib.rs @@ -1322,6 +1322,7 @@ fn derive_macro_as_call_id( item_attr: &AstIdWithPath, derive_attr_index: AttrId, derive_pos: u32, + call_site: SyntaxContextId, krate: CrateId, resolver: impl Fn(path::ModPath) -> Option<(MacroId, MacroDefId)>, ) -> Result<(MacroId, MacroDefId, MacroCallId), UnresolvedMacro> { @@ -1336,8 +1337,7 @@ fn derive_macro_as_call_id( derive_index: derive_pos, derive_attr_index, }, - //FIXME - SyntaxContextId::ROOT, + call_site, ); Ok((macro_id, def_id, call_id)) } @@ -1367,8 +1367,7 @@ fn attr_macro_as_call_id( attr_args: Arc::new(arg), invoc_attr_index: macro_attr.id, }, - //FIXME - SyntaxContextId::ROOT, + macro_attr.ctxt, ) } intern::impl_internable!( diff --git a/crates/hir-def/src/macro_expansion_tests/mbe.rs b/crates/hir-def/src/macro_expansion_tests/mbe.rs index af4b3e12b9f3e..dcecec4e8e4e9 100644 --- a/crates/hir-def/src/macro_expansion_tests/mbe.rs +++ b/crates/hir-def/src/macro_expansion_tests/mbe.rs @@ -23,10 +23,12 @@ macro_rules! f { }; } -// +tokenids +// +spans f!(struct MyTraitMap2); "#, - // FIXME: #SpanAnchor(FileId(0), 1)@91..92 why is there whitespace annotated with a span here? + // FIXME: #SpanAnchor(FileId(0), 1)@91..92\2# why is there whitespace annotated with a span + // here? Presumably because the leading `::` is getting two spans instead of one? Sounds + // liek glueing might be failing here expect![[r#" macro_rules! f { ( struct $ident:ident ) => { @@ -36,9 +38,9 @@ macro_rules! f { }; } -struct#SpanAnchor(FileId(0), 1)@58..64 MyTraitMap2#SpanAnchor(FileId(0), 2)@23..34 {#SpanAnchor(FileId(0), 1)@72..73 - map#SpanAnchor(FileId(0), 1)@86..89:#SpanAnchor(FileId(0), 1)@89..90 #SpanAnchor(FileId(0), 1)@91..92::#SpanAnchor(FileId(0), 1)@92..93std#SpanAnchor(FileId(0), 1)@93..96::#SpanAnchor(FileId(0), 1)@97..98collections#SpanAnchor(FileId(0), 1)@98..109::#SpanAnchor(FileId(0), 1)@110..111HashSet#SpanAnchor(FileId(0), 1)@111..118<#SpanAnchor(FileId(0), 1)@118..119(#SpanAnchor(FileId(0), 1)@119..120)#SpanAnchor(FileId(0), 1)@120..121>#SpanAnchor(FileId(0), 1)@121..122,#SpanAnchor(FileId(0), 1)@122..123 -}#SpanAnchor(FileId(0), 1)@132..133 +struct#FileId(0):1@58..64\2# MyTraitMap2#FileId(0):2@20..31\0# {#FileId(0):1@72..73\2# + map#FileId(0):1@86..89\2#:#FileId(0):1@89..90\2# #FileId(0):1@91..92\2#::#FileId(0):1@92..93\2#std#FileId(0):1@93..96\2#::#FileId(0):1@97..98\2#collections#FileId(0):1@98..109\2#::#FileId(0):1@110..111\2#HashSet#FileId(0):1@111..118\2#<#FileId(0):1@118..119\2#(#FileId(0):1@119..120\2#)#FileId(0):1@120..121\2#>#FileId(0):1@121..122\2#,#FileId(0):1@122..123\2# +}#FileId(0):1@132..133\2# "#]], ); } @@ -49,18 +51,19 @@ fn token_mapping_floats() { // (and related issues) check( r#" -// +tokenids +// +spans macro_rules! f { ($($tt:tt)*) => { $($tt)* }; } -// +tokenids +// +spans f! { fn main() { 1; 1.0; + ((1,),).0.0; let x = 1; } } @@ -68,18 +71,19 @@ f! { "#, expect![[r#" -// +tokenids +// +spans macro_rules! 
f { ($($tt:tt)*) => { $($tt)* }; } -fn#SpanAnchor(FileId(0), 2)@22..24 main#SpanAnchor(FileId(0), 2)@25..29(#SpanAnchor(FileId(0), 2)@29..30)#SpanAnchor(FileId(0), 2)@30..31 {#SpanAnchor(FileId(0), 2)@32..33 - 1#SpanAnchor(FileId(0), 2)@42..43;#SpanAnchor(FileId(0), 2)@43..44 - 1.0#SpanAnchor(FileId(0), 2)@53..56;#SpanAnchor(FileId(0), 2)@56..57 - let#SpanAnchor(FileId(0), 2)@66..69 x#SpanAnchor(FileId(0), 2)@70..71 =#SpanAnchor(FileId(0), 2)@72..73 1#SpanAnchor(FileId(0), 2)@74..75;#SpanAnchor(FileId(0), 2)@75..76 -}#SpanAnchor(FileId(0), 2)@81..82 +fn#FileId(0):2@19..21\0# main#FileId(0):2@22..26\0#(#FileId(0):2@26..27\0#)#FileId(0):2@27..28\0# {#FileId(0):2@29..30\0# + 1#FileId(0):2@39..40\0#;#FileId(0):2@40..41\0# + 1.0#FileId(0):2@50..53\0#;#FileId(0):2@53..54\0# + (#FileId(0):2@63..64\0#(#FileId(0):2@64..65\0#1#FileId(0):2@65..66\0#,#FileId(0):2@66..67\0# )#FileId(0):2@67..68\0#,#FileId(0):2@68..69\0# )#FileId(0):2@69..70\0#.#FileId(0):2@70..71\0#0#FileId(0):2@71..74\0#.#FileId(0):2@71..74\0#0#FileId(0):2@71..74\0#;#FileId(0):2@74..75\0# + let#FileId(0):2@84..87\0# x#FileId(0):2@88..89\0# =#FileId(0):2@90..91\0# 1#FileId(0):2@92..93\0#;#FileId(0):2@93..94\0# +}#FileId(0):2@99..100\0# "#]], @@ -123,7 +127,7 @@ macro_rules! identity { } fn main(foo: ()) { - format_args/*+tokenids*/!("{} {} {}", format_args!("{}", 0), foo, identity!(10), "bar") + format_args/*+spans*/!("{} {} {}", format_args!("{}", 0), foo, identity!(10), "bar") } "#, @@ -137,13 +141,36 @@ macro_rules! identity { } fn main(foo: ()) { - builtin#SpanAnchor(FileId(0), 0)@0..0 ##SpanAnchor(FileId(0), 0)@0..0format_args#SpanAnchor(FileId(0), 0)@0..0 (#SpanAnchor(FileId(0), 6)@25..26"{} {} {}"#SpanAnchor(FileId(0), 6)@26..36,#SpanAnchor(FileId(0), 6)@36..37 format_args#SpanAnchor(FileId(0), 6)@38..49!#SpanAnchor(FileId(0), 6)@49..50(#SpanAnchor(FileId(0), 6)@50..51"{}"#SpanAnchor(FileId(0), 6)@51..55,#SpanAnchor(FileId(0), 6)@55..56 0#SpanAnchor(FileId(0), 6)@57..58)#SpanAnchor(FileId(0), 6)@58..59,#SpanAnchor(FileId(0), 6)@59..60 foo#SpanAnchor(FileId(0), 6)@61..64,#SpanAnchor(FileId(0), 6)@64..65 identity#SpanAnchor(FileId(0), 6)@66..74!#SpanAnchor(FileId(0), 6)@74..75(#SpanAnchor(FileId(0), 6)@75..7610#SpanAnchor(FileId(0), 6)@76..78)#SpanAnchor(FileId(0), 6)@78..79,#SpanAnchor(FileId(0), 6)@79..80 "bar"#SpanAnchor(FileId(0), 6)@81..86)#SpanAnchor(FileId(0), 6)@86..87 + builtin#FileId(0):0@0..0\0# ##FileId(0):0@0..0\0#format_args#FileId(0):0@0..0\0# (#FileId(0):6@22..23\0#"{} {} {}"#FileId(0):6@23..33\0#,#FileId(0):6@33..34\0# format_args#FileId(0):6@35..46\0#!#FileId(0):6@46..47\0#(#FileId(0):6@47..48\0#"{}"#FileId(0):6@48..52\0#,#FileId(0):6@52..53\0# 0#FileId(0):6@54..55\0#)#FileId(0):6@55..56\0#,#FileId(0):6@56..57\0# foo#FileId(0):6@58..61\0#,#FileId(0):6@61..62\0# identity#FileId(0):6@63..71\0#!#FileId(0):6@71..72\0#(#FileId(0):6@72..73\0#10#FileId(0):6@73..75\0#)#FileId(0):6@75..76\0#,#FileId(0):6@76..77\0# "bar"#FileId(0):6@78..83\0#)#FileId(0):6@83..84\0# } "##]], ); } +#[test] +fn token_mapping_across_files() { + check( + r#" +//- /lib.rs +#[macro_use] +mod foo; + +mk_struct/*+spans*/!(Foo with u32); +//- /foo.rs +macro_rules! 
mk_struct { + ($foo:ident with $ty:ty) => { struct $foo($ty); } +} +"#, + expect![[r#" +#[macro_use] +mod foo; + +struct#FileId(1):1@59..65\2# Foo#FileId(0):2@21..24\0#(#FileId(1):1@70..71\2#u32#FileId(0):2@30..33\0#)#FileId(1):1@74..75\2#;#FileId(1):1@75..76\2# +"#]], + ); +} + #[test] fn float_field_access_macro_input() { check( diff --git a/crates/hir-def/src/macro_expansion_tests/mod.rs b/crates/hir-def/src/macro_expansion_tests/mod.rs index d4902c52e748d..f770d2832efb7 100644 --- a/crates/hir-def/src/macro_expansion_tests/mod.rs +++ b/crates/hir-def/src/macro_expansion_tests/mod.rs @@ -103,11 +103,11 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream for (call, exp) in expansions.into_iter().rev() { let mut tree = false; let mut expect_errors = false; - let mut show_token_ids = false; + let mut show_spans = false; for comment in call.syntax().children_with_tokens().filter(|it| it.kind() == COMMENT) { tree |= comment.to_string().contains("+tree"); expect_errors |= comment.to_string().contains("+errors"); - show_token_ids |= comment.to_string().contains("+tokenids"); + show_spans |= comment.to_string().contains("+spans"); } let mut expn_text = String::new(); @@ -128,10 +128,8 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream parse.syntax_node(), ); } - let pp = pretty_print_macro_expansion( - parse.syntax_node(), - show_token_ids.then_some(&*token_map), - ); + let pp = + pretty_print_macro_expansion(parse.syntax_node(), show_spans.then_some(&*token_map)); let indent = IndentLevel::from_node(call.syntax()); let pp = reindent(indent, pp); format_to!(expn_text, "{}", pp); @@ -166,9 +164,18 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream } _ => None, }; + if let Some(src) = src { if src.file_id.is_attr_macro(&db) || src.file_id.is_custom_derive(&db) { - let pp = pretty_print_macro_expansion(src.value, None); + let call = src.file_id.call_node(&db).expect("macro file"); + let mut show_spans = false; + for comment in call.value.children_with_tokens().filter(|it| it.kind() == COMMENT) { + show_spans |= comment.to_string().contains("+spans"); + } + let pp = pretty_print_macro_expansion( + src.value, + show_spans.then_some(&db.span_map(src.file_id)), + ); format_to!(expanded_text, "\n{}", pp) } } @@ -250,7 +257,14 @@ fn pretty_print_macro_expansion(expn: SyntaxNode, map: Option<&SpanMap>) -> Stri format_to!(res, "{}", token); if let Some(map) = map { if let Some(span) = map.span_for_range(token.text_range()) { - format_to!(res, "#{:?}@{:?}", span.anchor, span.range); + format_to!( + res, + "#{:?}:{:?}@{:?}\\{}#", + span.anchor.file_id, + span.anchor.ast_id.into_raw(), + span.range, + span.ctx + ); } } } diff --git a/crates/hir-def/src/macro_expansion_tests/proc_macros.rs b/crates/hir-def/src/macro_expansion_tests/proc_macros.rs index 822bdcc122dc6..29374945f647a 100644 --- a/crates/hir-def/src/macro_expansion_tests/proc_macros.rs +++ b/crates/hir-def/src/macro_expansion_tests/proc_macros.rs @@ -93,6 +93,41 @@ fn foo() { ); } +#[test] +fn macro_rules_in_attr() { + // Regression test for https://github.com/rust-lang/rust-analyzer/issues/12211 + check( + r#" +//- proc_macros: identity +macro_rules! id { + ($($t:tt)*) => { + $($t)* + }; +} +id! { + #[proc_macros::identity] + impl Foo for WrapBj { + async fn foo(&self) { + self.id().await; + } + } +} +"#, + expect![[r#" +macro_rules! 
id { + ($($t:tt)*) => { + $($t)* + }; +} +#[proc_macros::identity] impl Foo for WrapBj { + async fn foo(&self ) { + self .id().await ; + } +} +"#]], + ); +} + #[test] fn float_parsing_panic() { // Regression test for https://github.com/rust-lang/rust-analyzer/issues/12211 @@ -127,3 +162,27 @@ macro_rules! id { "#]], ); } + +#[test] +fn float_attribute_mapping() { + check( + r#" +//- proc_macros: identity +//+spans +#[proc_macros::identity] +fn foo(&self) { + self.0. 1; +} +"#, + expect![[r#" +//+spans +#[proc_macros::identity] +fn foo(&self) { + self.0. 1; +} + +fn#FileId(0):1@34..36\0# foo#FileId(0):1@37..40\0#(#FileId(0):1@40..41\0#&#FileId(0):1@41..42\0#self#FileId(0):1@42..46\0# )#FileId(0):1@46..47\0# {#FileId(0):1@48..49\0# + self#FileId(0):1@54..58\0# .#FileId(0):1@58..59\0#0#FileId(0):1@59..60\0#.#FileId(0):1@60..61\0#1#FileId(0):1@62..63\0#;#FileId(0):1@63..64\0# +}#FileId(0):1@65..66\0#"#]], + ); +} diff --git a/crates/hir-def/src/nameres/collector.rs b/crates/hir-def/src/nameres/collector.rs index 360bf0f93e30b..fef13604225a9 100644 --- a/crates/hir-def/src/nameres/collector.rs +++ b/crates/hir-def/src/nameres/collector.rs @@ -219,6 +219,7 @@ enum MacroDirectiveKind { ast_id: AstIdWithPath, derive_attr: AttrId, derive_pos: usize, + call_site: SyntaxContextId, }, Attr { ast_id: AstIdWithPath, @@ -324,7 +325,7 @@ impl DefCollector<'_> { .parse_path_comma_token_tree(self.db.upcast()) .into_iter() .flatten() - .filter_map(|feat| match feat.segments() { + .filter_map(|(feat, _)| match feat.segments() { [name] => Some(name.to_smol_str()), _ => None, }); @@ -1139,12 +1140,13 @@ impl DefCollector<'_> { return false; } } - MacroDirectiveKind::Derive { ast_id, derive_attr, derive_pos } => { + MacroDirectiveKind::Derive { ast_id, derive_attr, derive_pos, call_site } => { let id = derive_macro_as_call_id( self.db, ast_id, *derive_attr, *derive_pos as u32, + *call_site, self.def_map.krate, resolver, ); @@ -1242,7 +1244,7 @@ impl DefCollector<'_> { match attr.parse_path_comma_token_tree(self.db.upcast()) { Some(derive_macros) => { let mut len = 0; - for (idx, path) in derive_macros.enumerate() { + for (idx, (path, call_site)) in derive_macros.enumerate() { let ast_id = AstIdWithPath::new(file_id, ast_id.value, path); self.unresolved_macros.push(MacroDirective { module_id: directive.module_id, @@ -1251,6 +1253,7 @@ impl DefCollector<'_> { ast_id, derive_attr: attr.id, derive_pos: idx, + call_site, }, container: directive.container, }); @@ -1438,7 +1441,7 @@ impl DefCollector<'_> { )); } } - MacroDirectiveKind::Derive { ast_id, derive_attr, derive_pos } => { + MacroDirectiveKind::Derive { ast_id, derive_attr, derive_pos, call_site: _ } => { self.def_map.diagnostics.push(DefDiagnostic::unresolved_macro_call( directive.module_id, MacroCallKind::Derive { @@ -1828,7 +1831,7 @@ impl ModCollector<'_, '_> { ); return; }; - for path in paths { + for (path, _) in paths { if let Some(name) = path.as_ident() { single_imports.push(name.clone()); } diff --git a/crates/hir-def/src/nameres/tests.rs b/crates/hir-def/src/nameres/tests.rs index e7cc44b04da80..b2ffbbe4c5d8f 100644 --- a/crates/hir-def/src/nameres/tests.rs +++ b/crates/hir-def/src/nameres/tests.rs @@ -8,9 +8,7 @@ use base_db::{fixture::WithFixture, SourceDatabase}; use expect_test::{expect, Expect}; use triomphe::Arc; -use crate::{db::DefDatabase, test_db::TestDB}; - -use super::DefMap; +use crate::{db::DefDatabase, nameres::DefMap, test_db::TestDB}; fn compute_crate_def_map(ra_fixture: &str) -> Arc { let db = TestDB::with_files(ra_fixture); diff 
--git a/crates/hir-def/src/test_db.rs b/crates/hir-def/src/test_db.rs index db64e7cd538d1..f4a6b61f7af5e 100644 --- a/crates/hir-def/src/test_db.rs +++ b/crates/hir-def/src/test_db.rs @@ -7,7 +7,7 @@ use base_db::{ AnchoredPath, CrateId, FileId, FileLoader, FileLoaderDelegate, FilePosition, SourceDatabase, Upcast, }; -use hir_expand::{db::ExpandDatabase, hygiene::SyntaxContextData, InFile}; +use hir_expand::{db::ExpandDatabase, InFile}; use rustc_hash::FxHashSet; use syntax::{algo, ast, AstNode}; use triomphe::Arc; @@ -34,7 +34,7 @@ pub(crate) struct TestDB { impl Default for TestDB { fn default() -> Self { let mut this = Self { storage: Default::default(), events: Default::default() }; - this.intern_syntax_context(SyntaxContextData::root()); + this.setup_syntax_context_root(); this.set_expand_proc_attr_macros_with_durability(true, Durability::HIGH); this } diff --git a/crates/hir-expand/src/attrs.rs b/crates/hir-expand/src/attrs.rs index 5ce12d2f6e717..76c787721b55a 100644 --- a/crates/hir-expand/src/attrs.rs +++ b/crates/hir-expand/src/attrs.rs @@ -2,13 +2,16 @@ use std::{fmt, ops}; use ::tt::SpanAnchor as _; -use base_db::{span::SpanAnchor, CrateId}; +use base_db::{ + span::{SpanAnchor, SyntaxContextId}, + CrateId, +}; use cfg::CfgExpr; use either::Either; use intern::Interned; use mbe::{syntax_node_to_token_tree, DelimiterKind, Punct}; use smallvec::{smallvec, SmallVec}; -use syntax::{ast, match_ast, AstNode, SmolStr, SyntaxNode}; +use syntax::{ast, match_ast, AstNode, AstToken, SmolStr, SyntaxNode}; use triomphe::Arc; use crate::{ @@ -54,6 +57,9 @@ impl RawAttrs { id, input: Some(Interned::new(AttrInput::Literal(SmolStr::new(doc)))), path: Interned::new(ModPath::from(crate::name!(doc))), + ctxt: hygiene + .span_for_range(comment.syntax().text_range()) + .map_or(SyntaxContextId::ROOT, |s| s.ctx), }), }) .collect::>(); @@ -191,6 +197,7 @@ pub struct Attr { pub id: AttrId, pub path: Interned, pub input: Option>, + pub ctxt: SyntaxContextId, } #[derive(Debug, Clone, PartialEq, Eq, Hash)] @@ -235,7 +242,14 @@ impl Attr { } else { None }; - Some(Attr { id, path, input }) + Some(Attr { + id, + path, + input, + ctxt: hygiene + .span_for_range(ast.syntax().text_range()) + .map_or(SyntaxContextId::ROOT, |s| s.ctx), + }) } fn from_tt(db: &dyn ExpandDatabase, tt: &tt::Subtree, id: AttrId) -> Option { @@ -284,9 +298,8 @@ impl Attr { pub fn parse_path_comma_token_tree<'a>( &'a self, db: &'a dyn ExpandDatabase, - ) -> Option + 'a> { + ) -> Option + 'a> { let args = self.token_tree_value()?; - dbg!(args); if args.delimiter.kind != DelimiterKind::Parenthesis { return None; @@ -298,6 +311,11 @@ impl Attr { if tts.is_empty() { return None; } + // FIXME: Absolutely wrong + let call_site = match tts.first().unwrap() { + tt::TokenTree::Leaf(l) => l.span().ctx, + tt::TokenTree::Subtree(s) => s.delimiter.open.ctx, + }; // FIXME: This is necessarily a hack. It'd be nice if we could avoid allocation // here. let subtree = tt::Subtree { @@ -313,7 +331,7 @@ impl Attr { return None; } let path = meta.path()?; - ModPath::from_src(db, path, &span_map) + Some((ModPath::from_src(db, path, &span_map)?, call_site)) }); Some(paths) diff --git a/crates/hir-expand/src/db.rs b/crates/hir-expand/src/db.rs index 7dd69099a6e3f..e176bef78b5ad 100644 --- a/crates/hir-expand/src/db.rs +++ b/crates/hir-expand/src/db.rs @@ -103,7 +103,7 @@ pub trait ExpandDatabase: SourceDatabase { &self, macro_file: MacroFile, ) -> ExpandResult<(Parse, Arc)>; - // TODO: transparent? 
+ // FIXME: This always allocates one for non macro files which is wasteful. #[salsa::transparent] fn span_map(&self, file_id: HirFileId) -> Arc; @@ -117,6 +117,8 @@ pub trait ExpandDatabase: SourceDatabase { #[salsa::interned] fn intern_syntax_context(&self, ctx: SyntaxContextData) -> SyntaxContextId; #[salsa::transparent] + fn setup_syntax_context_root(&self) -> (); + #[salsa::transparent] #[salsa::invoke(hygiene::apply_mark)] fn apply_mark( &self, @@ -770,3 +772,7 @@ fn check_tt_count(tt: &tt::Subtree) -> Result<(), ExpandResult> Ok(()) } } + +fn setup_syntax_context_root(db: &dyn ExpandDatabase) { + db.intern_syntax_context(SyntaxContextData::root()); +} diff --git a/crates/hir-ty/src/test_db.rs b/crates/hir-ty/src/test_db.rs index a3383b2b5dfbc..6f4aef22d2f78 100644 --- a/crates/hir-ty/src/test_db.rs +++ b/crates/hir-ty/src/test_db.rs @@ -7,7 +7,7 @@ use base_db::{ AnchoredPath, CrateId, FileId, FileLoader, FileLoaderDelegate, SourceDatabase, Upcast, }; use hir_def::{db::DefDatabase, ModuleId}; -use hir_expand::{db::ExpandDatabase, hygiene::SyntaxContextData}; +use hir_expand::db::ExpandDatabase; use nohash_hasher::IntMap; use rustc_hash::FxHashSet; use syntax::TextRange; @@ -30,7 +30,7 @@ pub(crate) struct TestDB { impl Default for TestDB { fn default() -> Self { let mut this = Self { storage: Default::default(), events: Default::default() }; - this.intern_syntax_context(SyntaxContextData::root()); + this.setup_syntax_context_root(); this.set_expand_proc_attr_macros_with_durability(true, Durability::HIGH); this } diff --git a/crates/hir-ty/src/tests/macros.rs b/crates/hir-ty/src/tests/macros.rs index d16e0eb0137bf..1e10a6fecafb6 100644 --- a/crates/hir-ty/src/tests/macros.rs +++ b/crates/hir-ty/src/tests/macros.rs @@ -63,10 +63,10 @@ fn infer_macros_expanded() { } "#, expect![[r#" - !0..17 '{Foo(v...,2,])}': Foo + !0..21 '{Foo(v...2),])}': Foo !1..4 'Foo': Foo({unknown}) -> Foo - !1..16 'Foo(vec![1,2,])': Foo - !5..15 'vec![1,2,]': {unknown} + !1..20 'Foo(ve...(2),])': Foo + !5..19 'vec![(1),(2),]': {unknown} 155..181 '{ ...,2); }': () 165..166 'x': Foo "#]], @@ -96,10 +96,10 @@ fn infer_legacy_textual_scoped_macros_expanded() { } "#, expect![[r#" - !0..17 '{Foo(v...,2,])}': Foo + !0..21 '{Foo(v...2),])}': Foo !1..4 'Foo': Foo({unknown}) -> Foo - !1..16 'Foo(vec![1,2,])': Foo - !5..15 'vec![1,2,]': {unknown} + !1..20 'Foo(ve...(2),])': Foo + !5..19 'vec![(1),(2),]': {unknown} 194..250 '{ ...,2); }': () 204..205 'x': Foo 227..228 'y': {unknown} diff --git a/crates/ide-db/src/lib.rs b/crates/ide-db/src/lib.rs index 957c9ad26ca20..38c9a3538f3a7 100644 --- a/crates/ide-db/src/lib.rs +++ b/crates/ide-db/src/lib.rs @@ -144,6 +144,7 @@ impl RootDatabase { db.set_library_roots_with_durability(Default::default(), Durability::HIGH); db.set_expand_proc_attr_macros_with_durability(false, Durability::HIGH); db.update_parse_query_lru_capacity(lru_capacity); + db.setup_syntax_context_root(); db } diff --git a/crates/mbe/src/syntax_bridge.rs b/crates/mbe/src/syntax_bridge.rs index b843db510e39c..f47123336b44d 100644 --- a/crates/mbe/src/syntax_bridge.rs +++ b/crates/mbe/src/syntax_bridge.rs @@ -717,24 +717,29 @@ where /// Parses a float literal as if it was a one to two name ref nodes with a dot inbetween. /// This occurs when a float literal is used as a field access. 
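/// For example (sketch): in `((1,),).0.0` the lexer produces the single
/// float token `0.0`; it is split back into `0`, `.`, `0` here so the
/// parser sees two tuple-field accesses, and each piece now needs its own
/// entry in the token map (which the `token_map.insert` calls below add).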
fn float_split(&mut self, has_pseudo_dot: bool) { - // TODO: FIXME this breaks the hygiene map - let (text, _span) = match self.cursor.token_tree() { + let (text, span) = match self.cursor.token_tree() { Some(tt::buffer::TokenTreeRef::Leaf(tt::Leaf::Literal(lit), _)) => { (lit.text.as_str(), lit.span) } _ => unreachable!(), }; + // FIXME: Span splitting match text.split_once('.') { Some((left, right)) => { assert!(!left.is_empty()); + self.inner.start_node(SyntaxKind::NAME_REF); self.inner.token(SyntaxKind::INT_NUMBER, left); self.inner.finish_node(); + let range = TextRange::at(self.text_pos, TextSize::of(left)); + self.token_map.insert(range, span); // here we move the exit up, the original exit has been deleted in process self.inner.finish_node(); self.inner.token(SyntaxKind::DOT, "."); + let range = TextRange::at(range.end(), TextSize::of(".")); + self.token_map.insert(range, span); if has_pseudo_dot { assert!(right.is_empty(), "{left}.{right}"); @@ -742,11 +747,14 @@ where assert!(!right.is_empty(), "{left}.{right}"); self.inner.start_node(SyntaxKind::NAME_REF); self.inner.token(SyntaxKind::INT_NUMBER, right); + let range = TextRange::at(range.end(), TextSize::of(right)); + self.token_map.insert(range, span); self.inner.finish_node(); // the parser creates an unbalanced start node, we are required to close it here self.inner.finish_node(); } + self.text_pos += TextSize::of(text); } None => unreachable!(), } diff --git a/crates/mbe/src/token_map.rs b/crates/mbe/src/token_map.rs index c825bd01bcd63..dfbf54410b180 100644 --- a/crates/mbe/src/token_map.rs +++ b/crates/mbe/src/token_map.rs @@ -2,6 +2,7 @@ use std::hash::Hash; +use stdx::never; use syntax::TextRange; use tt::Span; @@ -59,11 +60,10 @@ impl TokenMap { .max_by_key(|(_, _, intersection)| intersection.len()) .map(|(_, &s, _)| s) .or_else(|| { - if self.real_file { - None - } else { - panic!("no span for range {range:?} in {:#?}", self.span_map) + if !self.real_file { + never!("no span for range {:?} in {:#?}", range, self.span_map); } + None }) } From 30093a6d81f49854d0ad068765bf7af9fdb2a101 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Fri, 24 Nov 2023 16:38:48 +0100 Subject: [PATCH 09/80] spans always come from real file --- Cargo.lock | 1 + crates/base-db/src/span.rs | 9 +- crates/cfg/src/tests.rs | 40 +- crates/hir-def/src/attr.rs | 50 +- crates/hir-def/src/attr/tests.rs | 16 +- crates/hir-def/src/expander.rs | 26 +- crates/hir-def/src/find_path.rs | 5 +- crates/hir-def/src/item_scope.rs | 9 + crates/hir-def/src/item_tree.rs | 15 +- crates/hir-def/src/item_tree/lower.rs | 132 +---- crates/hir-def/src/lib.rs | 17 +- crates/hir-def/src/lower.rs | 13 +- .../hir-def/src/macro_expansion_tests/mbe.rs | 35 +- .../hir-def/src/macro_expansion_tests/mod.rs | 45 +- .../src/macro_expansion_tests/proc_macros.rs | 11 +- crates/hir-def/src/nameres/collector.rs | 2 +- crates/hir-def/src/path/lower.rs | 19 +- crates/hir-def/src/visibility.rs | 8 +- crates/hir-expand/src/ast_id_map.rs | 47 +- crates/hir-expand/src/attrs.rs | 49 +- crates/hir-expand/src/builtin_attr_macro.rs | 9 +- crates/hir-expand/src/builtin_derive_macro.rs | 122 ++-- crates/hir-expand/src/builtin_fn_macro.rs | 3 +- crates/hir-expand/src/db.rs | 152 ++--- crates/hir-expand/src/eager.rs | 78 +-- crates/hir-expand/src/files.rs | 293 ++++++++++ crates/hir-expand/src/hygiene.rs | 67 ++- crates/hir-expand/src/lib.rs | 551 +++++------------- crates/hir-expand/src/mod_path.rs | 56 +- crates/hir-expand/src/span.rs | 109 ++++ crates/hir/src/attrs.rs | 12 +- crates/hir/src/lib.rs 
| 34 +- crates/hir/src/semantics.rs | 38 +- crates/hir/src/symbols.rs | 12 +- .../ide-completion/src/tests/proc_macros.rs | 2 + crates/ide-db/src/rename.rs | 79 ++- .../src/handlers/macro_error.rs | 4 - .../src/handlers/typed_hole.rs | 2 +- .../src/handlers/unresolved_module.rs | 4 +- crates/ide/src/expand_macro.rs | 10 +- crates/ide/src/goto_definition.rs | 4 +- crates/ide/src/goto_implementation.rs | 2 +- crates/ide/src/hover.rs | 1 + .../ide/src/inlay_hints/closure_captures.rs | 5 +- crates/ide/src/navigation_target.rs | 43 +- crates/ide/src/static_index.rs | 1 + .../test_data/highlight_strings.html | 2 +- crates/mbe/src/benchmark.rs | 19 +- crates/mbe/src/lib.rs | 4 +- crates/mbe/src/syntax_bridge.rs | 194 +++--- crates/mbe/src/syntax_bridge/tests.rs | 15 +- crates/mbe/src/token_map.rs | 57 +- crates/proc-macro-srv/src/lib.rs | 2 +- crates/rust-analyzer/src/cargo_target_spec.rs | 22 +- crates/stdx/Cargo.toml | 1 + crates/stdx/src/lib.rs | 19 + crates/tt/src/lib.rs | 14 +- 57 files changed, 1368 insertions(+), 1223 deletions(-) create mode 100644 crates/hir-expand/src/files.rs create mode 100644 crates/hir-expand/src/span.rs diff --git a/Cargo.lock b/Cargo.lock index 90ee0810fa6aa..3618d69c749a7 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1752,6 +1752,7 @@ dependencies = [ "always-assert", "backtrace", "crossbeam-channel", + "itertools 0.12.0", "jod-thread", "libc", "miow", diff --git a/crates/base-db/src/span.rs b/crates/base-db/src/span.rs index 600aa079333eb..a78f558759b00 100644 --- a/crates/base-db/src/span.rs +++ b/crates/base-db/src/span.rs @@ -35,11 +35,15 @@ impl SyntaxContextId { // FIXME: This is very much UB, salsa exposes no way to create an InternId in a const context // currently (which kind of makes sense but we need it here!) pub const SELF_REF: Self = SyntaxContextId(unsafe { core::mem::transmute(!0u32) }); + + pub fn is_root(self) -> bool { + self == Self::ROOT + } } #[derive(Copy, Clone, PartialEq, Eq, Hash)] pub struct SpanAnchor { - pub file_id: HirFileId, + pub file_id: FileId, pub ast_id: ErasedFileAstId, } @@ -50,7 +54,7 @@ impl fmt::Debug for SpanAnchor { } impl tt::SpanAnchor for SpanAnchor { - const DUMMY: Self = SpanAnchor { file_id: HirFileId(0), ast_id: ROOT_ERASED_FILE_AST_ID }; + const DUMMY: Self = SpanAnchor { file_id: FileId(0), ast_id: ROOT_ERASED_FILE_AST_ID }; } /// Input to the analyzer is a set of files, where each file is identified by @@ -101,7 +105,6 @@ impl fmt::Debug for HirFileId { pub struct MacroFile { pub macro_call_id: MacroCallId, } - /// `MacroCallId` identifies a particular macro invocation, like /// `println!("Hello, {}", world)`. 
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] diff --git a/crates/cfg/src/tests.rs b/crates/cfg/src/tests.rs index 0ea176858c944..61cdbded0b99e 100644 --- a/crates/cfg/src/tests.rs +++ b/crates/cfg/src/tests.rs @@ -1,12 +1,12 @@ use arbitrary::{Arbitrary, Unstructured}; use expect_test::{expect, Expect}; -use mbe::syntax_node_to_token_tree; +use mbe::{syntax_node_to_token_tree, SpanMapper}; use syntax::{ast, AstNode}; use tt::{SpanAnchor, SyntaxContext}; use crate::{CfgAtom, CfgExpr, CfgOptions, DnfExpr}; -#[derive(Debug, Copy, Clone, PartialEq, Eq)] +#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] struct DummyFile; impl SpanAnchor for DummyFile { const DUMMY: Self = DummyFile; @@ -17,15 +17,18 @@ impl SyntaxContext for DummyCtx { const DUMMY: Self = DummyCtx; } +struct NoOpMap; + +impl SpanMapper> for NoOpMap { + fn span_for(&self, range: syntax::TextRange) -> tt::SpanData { + tt::SpanData { range, anchor: DummyFile, ctx: DummyCtx } + } +} + fn assert_parse_result(input: &str, expected: CfgExpr) { let source_file = ast::SourceFile::parse(input).ok().unwrap(); let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap(); - let tt = syntax_node_to_token_tree::<_, DummyCtx>( - tt.syntax(), - DummyFile, - 0.into(), - &Default::default(), - ); + let tt = syntax_node_to_token_tree(tt.syntax(), NoOpMap); let cfg = CfgExpr::parse(&tt); assert_eq!(cfg, expected); } @@ -33,12 +36,7 @@ fn assert_parse_result(input: &str, expected: CfgExpr) { fn check_dnf(input: &str, expect: Expect) { let source_file = ast::SourceFile::parse(input).ok().unwrap(); let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap(); - let tt = syntax_node_to_token_tree::<_, DummyCtx>( - tt.syntax(), - DummyFile, - 0.into(), - &Default::default(), - ); + let tt = syntax_node_to_token_tree(tt.syntax(), NoOpMap); let cfg = CfgExpr::parse(&tt); let actual = format!("#![cfg({})]", DnfExpr::new(cfg)); expect.assert_eq(&actual); @@ -47,12 +45,7 @@ fn check_dnf(input: &str, expect: Expect) { fn check_why_inactive(input: &str, opts: &CfgOptions, expect: Expect) { let source_file = ast::SourceFile::parse(input).ok().unwrap(); let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap(); - let tt = syntax_node_to_token_tree::<_, DummyCtx>( - tt.syntax(), - DummyFile, - 0.into(), - &Default::default(), - ); + let tt = syntax_node_to_token_tree(tt.syntax(), NoOpMap); let cfg = CfgExpr::parse(&tt); let dnf = DnfExpr::new(cfg); let why_inactive = dnf.why_inactive(opts).unwrap().to_string(); @@ -63,12 +56,7 @@ fn check_why_inactive(input: &str, opts: &CfgOptions, expect: Expect) { fn check_enable_hints(input: &str, opts: &CfgOptions, expected_hints: &[&str]) { let source_file = ast::SourceFile::parse(input).ok().unwrap(); let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap(); - let tt = syntax_node_to_token_tree::<_, DummyCtx>( - tt.syntax(), - DummyFile, - 0.into(), - &Default::default(), - ); + let tt = syntax_node_to_token_tree(tt.syntax(), NoOpMap); let cfg = CfgExpr::parse(&tt); let dnf = DnfExpr::new(cfg); let hints = dnf.compute_enable_hints(opts).map(|diff| diff.to_string()).collect::>(); diff --git a/crates/hir-def/src/attr.rs b/crates/hir-def/src/attr.rs index 35c9d63979a06..45dd981dceece 100644 --- a/crates/hir-def/src/attr.rs +++ b/crates/hir-def/src/attr.rs @@ -7,10 +7,7 @@ mod tests; use std::{hash::Hash, ops, slice::Iter as SliceIter}; -use base_db::{ - span::{ErasedFileAstId, SpanAnchor}, - CrateId, -}; +use 
base_db::CrateId; use cfg::{CfgExpr, CfgOptions}; use either::Either; use hir_expand::{ @@ -31,8 +28,8 @@ use crate::{ lang_item::LangItem, nameres::{ModuleOrigin, ModuleSource}, src::{HasChildSource, HasSource}, - AdtId, AssocItemLoc, AttrDefId, EnumId, GenericDefId, GenericParamId, ItemLoc, - LocalEnumVariantId, LocalFieldId, Lookup, MacroId, VariantId, + AdtId, AssocItemLoc, AttrDefId, EnumId, GenericParamId, ItemLoc, LocalEnumVariantId, + LocalFieldId, Lookup, MacroId, VariantId, }; #[derive(Default, Debug, Clone, PartialEq, Eq)] @@ -419,43 +416,30 @@ impl AttrsWithOwner { AttrDefId::FunctionId(it) => attrs_from_item_tree_assoc(db, it), AttrDefId::TypeAliasId(it) => attrs_from_item_tree_assoc(db, it), AttrDefId::GenericParamId(it) => { - let ast_id = |p| match p { - GenericDefId::AdtId(AdtId::StructId(it)) => { - erased_ast_id_from_item_tree(db, it) - } - GenericDefId::AdtId(AdtId::EnumId(it)) => erased_ast_id_from_item_tree(db, it), - GenericDefId::AdtId(AdtId::UnionId(it)) => erased_ast_id_from_item_tree(db, it), - GenericDefId::TraitId(it) => erased_ast_id_from_item_tree(db, it), - GenericDefId::TraitAliasId(it) => erased_ast_id_from_item_tree(db, it), - GenericDefId::ImplId(it) => erased_ast_id_from_item_tree(db, it), - GenericDefId::EnumVariantId(it) => erased_ast_id_from_item_tree(db, it.parent), - GenericDefId::TypeAliasId(it) => erased_ast_id_from_item_tree_assoc(db, it), - GenericDefId::FunctionId(it) => erased_ast_id_from_item_tree_assoc(db, it), - GenericDefId::ConstId(it) => erased_ast_id_from_item_tree_assoc(db, it), - }; + // FIXME: we could probably just make these relative to the params? match it { GenericParamId::ConstParamId(it) => { let src = it.parent().child_source(db); RawAttrs::from_attrs_owner( db.upcast(), - SpanAnchor { file_id: src.file_id, ast_id: ast_id(it.parent()) }, src.with_value(&src.value[it.local_id()]), + db.span_map(src.file_id).as_ref(), ) } GenericParamId::TypeParamId(it) => { let src = it.parent().child_source(db); RawAttrs::from_attrs_owner( db.upcast(), - SpanAnchor { file_id: src.file_id, ast_id: ast_id(it.parent()) }, src.with_value(&src.value[it.local_id()]), + db.span_map(src.file_id).as_ref(), ) } GenericParamId::LifetimeParamId(it) => { let src = it.parent.child_source(db); RawAttrs::from_attrs_owner( db.upcast(), - SpanAnchor { file_id: src.file_id, ast_id: ast_id(it.parent) }, src.with_value(&src.value[it.local_id]), + db.span_map(src.file_id).as_ref(), ) } } @@ -663,26 +647,6 @@ fn any_has_attrs( id.lookup(db).source(db).map(ast::AnyHasAttrs::new) } -fn erased_ast_id_from_item_tree( - db: &dyn DefDatabase, - lookup: impl Lookup>, -) -> ErasedFileAstId { - let id = lookup.lookup(db).id; - let tree = id.item_tree(db); - let mod_item = N::id_to_mod_item(id.value); - mod_item.ast_id(&tree).erase() -} - -fn erased_ast_id_from_item_tree_assoc( - db: &dyn DefDatabase, - lookup: impl Lookup>, -) -> ErasedFileAstId { - let id = lookup.lookup(db).id; - let tree = id.item_tree(db); - let mod_item = N::id_to_mod_item(id.value); - mod_item.ast_id(&tree).erase() -} - fn attrs_from_item_tree(db: &dyn DefDatabase, id: ItemTreeId) -> RawAttrs { let tree = id.item_tree(db); let mod_item = N::id_to_mod_item(id.value); diff --git a/crates/hir-def/src/attr/tests.rs b/crates/hir-def/src/attr/tests.rs index 60e5cebd3cb06..0f21dc98299f4 100644 --- a/crates/hir-def/src/attr/tests.rs +++ b/crates/hir-def/src/attr/tests.rs @@ -1,27 +1,19 @@ //! This module contains tests for doc-expression parsing. //! Currently, it tests `#[doc(hidden)]` and `#[doc(alias)]`. 
-use base_db::span::SpanAnchor; +use base_db::FileId; +use hir_expand::span::{RealSpanMap, SpanMapRef}; use mbe::syntax_node_to_token_tree; use syntax::{ast, AstNode}; -use tt::{SpanAnchor as _, SyntaxContext}; use crate::attr::{DocAtom, DocExpr}; -#[derive(Copy, Clone, PartialEq, Eq, Debug)] -struct DummyCtx; -impl SyntaxContext for DummyCtx { - const DUMMY: Self = DummyCtx; -} - fn assert_parse_result(input: &str, expected: DocExpr) { let source_file = ast::SourceFile::parse(input).ok().unwrap(); let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap(); - let tt = syntax_node_to_token_tree::<_, DummyCtx>( + let tt = syntax_node_to_token_tree( tt.syntax(), - SpanAnchor::DUMMY, - 0.into(), - &Default::default(), + SpanMapRef::RealSpanMap(&RealSpanMap::empty(FileId(0))), ); let cfg = DocExpr::parse(&tt); assert_eq!(cfg, expected); diff --git a/crates/hir-def/src/expander.rs b/crates/hir-def/src/expander.rs index 793c8ddeb504d..d8ee61a3dca36 100644 --- a/crates/hir-def/src/expander.rs +++ b/crates/hir-def/src/expander.rs @@ -1,28 +1,24 @@ //! Macro expansion utilities. -use base_db::{ - span::{SpanAnchor, ROOT_ERASED_FILE_AST_ID}, - CrateId, -}; +use base_db::CrateId; use cfg::CfgOptions; use drop_bomb::DropBomb; use hir_expand::{ - attrs::RawAttrs, mod_path::ModPath, ExpandError, ExpandResult, HirFileId, InFile, MacroCallId, - SpanMap, UnresolvedMacro, + attrs::RawAttrs, mod_path::ModPath, span::SpanMap, ExpandError, ExpandResult, HirFileId, + InFile, MacroCallId, }; use limit::Limit; use syntax::{ast, Parse, SyntaxNode}; -use triomphe::Arc; use crate::{ attr::Attrs, db::DefDatabase, lower::LowerCtx, macro_id_to_def_id, path::Path, AsMacroCall, - MacroId, ModuleId, + MacroId, ModuleId, UnresolvedMacro, }; #[derive(Debug)] pub struct Expander { cfg_options: CfgOptions, - hygiene: Arc, + hygiene: SpanMap, krate: CrateId, pub(crate) current_file_id: HirFileId, pub(crate) module: ModuleId, @@ -122,17 +118,7 @@ impl Expander { } pub(crate) fn parse_attrs(&self, db: &dyn DefDatabase, owner: &dyn ast::HasAttrs) -> Attrs { - Attrs::filter( - db, - self.krate, - RawAttrs::new( - db.upcast(), - // Usin `ROOT_ERASED_FILE_AST_ID` here is fine as this is only used for cfg checking - SpanAnchor { file_id: self.current_file_id, ast_id: ROOT_ERASED_FILE_AST_ID }, - owner, - &self.hygiene, - ), - ) + Attrs::filter(db, self.krate, RawAttrs::new(db.upcast(), owner, self.hygiene.as_ref())) } pub(crate) fn cfg_options(&self) -> &CfgOptions { diff --git a/crates/hir-def/src/find_path.rs b/crates/hir-def/src/find_path.rs index 5051884714ba4..13af0b0218e89 100644 --- a/crates/hir-def/src/find_path.rs +++ b/crates/hir-def/src/find_path.rs @@ -586,7 +586,7 @@ fn find_local_import_locations( #[cfg(test)] mod tests { use base_db::fixture::WithFixture; - use hir_expand::SpanMap; + use hir_expand::db::ExpandDatabase; use syntax::ast::AstNode; use crate::test_db::TestDB; @@ -608,7 +608,8 @@ mod tests { let parsed_path_file = syntax::SourceFile::parse(&format!("use {path};")); let ast_path = parsed_path_file.syntax_node().descendants().find_map(syntax::ast::Path::cast).unwrap(); - let mod_path = ModPath::from_src(&db, ast_path, &SpanMap::default()).unwrap(); + let mod_path = + ModPath::from_src(&db, ast_path, db.span_map(pos.file_id.into()).as_ref()).unwrap(); let def_map = module.def_map(&db); let resolved = def_map diff --git a/crates/hir-def/src/item_scope.rs b/crates/hir-def/src/item_scope.rs index 7c11fb9d13676..ce83cb435e2e8 100644 --- a/crates/hir-def/src/item_scope.rs +++ 
b/crates/hir-def/src/item_scope.rs @@ -112,6 +112,7 @@ pub struct ItemScope { #[derive(Debug, PartialEq, Eq)] struct DeriveMacroInvocation { attr_id: AttrId, + /// The `#[derive]` call attr_call_id: MacroCallId, derive_call_ids: SmallVec<[Option; 1]>, } @@ -401,6 +402,14 @@ impl ItemScope { }) } + pub fn derive_macro_invoc( + &self, + ast_id: AstId, + attr_id: AttrId, + ) -> Option { + Some(self.derive_macros.get(&ast_id)?.iter().find(|it| it.attr_id == attr_id)?.attr_call_id) + } + // FIXME: This is only used in collection, we should move the relevant parts of it out of ItemScope pub(crate) fn unnamed_trait_vis(&self, tr: TraitId) -> Option { self.unnamed_trait_imports.get(&tr).copied().map(|(a, _)| a) diff --git a/crates/hir-def/src/item_tree.rs b/crates/hir-def/src/item_tree.rs index 901e14a2117ba..9c61deb423cb9 100644 --- a/crates/hir-def/src/item_tree.rs +++ b/crates/hir-def/src/item_tree.rs @@ -43,10 +43,7 @@ use std::{ }; use ast::{AstNode, HasName, StructKind}; -use base_db::{ - span::{SpanAnchor, SyntaxContextId, ROOT_ERASED_FILE_AST_ID}, - CrateId, -}; +use base_db::{span::SyntaxContextId, CrateId}; use either::Either; use hir_expand::{ ast_id_map::{AstIdNode, FileAstId}, @@ -121,7 +118,7 @@ impl ItemTree { let mut item_tree = match_ast! { match syntax { ast::SourceFile(file) => { - top_attrs = Some(RawAttrs::new(db.upcast(), SpanAnchor { file_id, ast_id: ROOT_ERASED_FILE_AST_ID }, &file, ctx.span_map())); + top_attrs = Some(RawAttrs::new(db.upcast(), &file, ctx.span_map())); ctx.lower_module_items(&file) }, ast::MacroItems(items) => { @@ -780,8 +777,8 @@ impl Use { let ast = InFile::new(file_id, self.ast_id).to_node(db.upcast()); let ast_use_tree = ast.use_tree().expect("missing `use_tree`"); let hygiene = db.span_map(file_id); - let (_, source_map) = - lower::lower_use_tree(db, &hygiene, ast_use_tree).expect("failed to lower use tree"); + let (_, source_map) = lower::lower_use_tree(db, hygiene.as_ref(), ast_use_tree) + .expect("failed to lower use tree"); source_map[index].clone() } /// Maps a `UseTree` contained in this import back to its AST node. 
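A note on the `.as_ref()` calls that recur from here on: the patch stops passing span maps around as `Arc<SpanMap>` and instead threads a cheap borrowed view. A minimal sketch of the owned/borrowed pair this implies; the variant and method names follow the call sites in this diff, while the two map types are stubbed out:

use triomphe::Arc;

struct RealSpanMap;      // span lookup for a file on disk (stub)
struct ExpansionSpanMap; // span lookup for a macro expansion (stub)

/// Owned handle, as returned by `db.span_map(...)`.
enum SpanMap {
    RealSpanMap(Arc<RealSpanMap>),
    ExpansionSpanMap(Arc<ExpansionSpanMap>),
}

/// Borrowed view, as taken by `lower_use_tree`, `RawAttrs::new`, etc.
#[derive(Copy, Clone)]
enum SpanMapRef<'a> {
    RealSpanMap(&'a RealSpanMap),
    ExpansionSpanMap(&'a ExpansionSpanMap),
}

impl SpanMap {
    /// The conversion behind every `hygiene.as_ref()` in this patch.
    fn as_ref(&self) -> SpanMapRef<'_> {
        match self {
            SpanMap::RealSpanMap(it) => SpanMapRef::RealSpanMap(it),
            SpanMap::ExpansionSpanMap(it) => SpanMapRef::ExpansionSpanMap(it),
        }
    }
}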
@@ -795,7 +792,9 @@ impl Use { let ast = InFile::new(file_id, self.ast_id).to_node(db.upcast()); let ast_use_tree = ast.use_tree().expect("missing `use_tree`"); let hygiene = db.span_map(file_id); - lower::lower_use_tree(db, &hygiene, ast_use_tree).expect("failed to lower use tree").1 + lower::lower_use_tree(db, hygiene.as_ref(), ast_use_tree) + .expect("failed to lower use tree") + .1 } } diff --git a/crates/hir-def/src/item_tree/lower.rs b/crates/hir-def/src/item_tree/lower.rs index 0b3def6d756b6..933a59be15347 100644 --- a/crates/hir-def/src/item_tree/lower.rs +++ b/crates/hir-def/src/item_tree/lower.rs @@ -2,8 +2,7 @@ use std::collections::hash_map::Entry; -use base_db::span::ErasedFileAstId; -use hir_expand::{ast_id_map::AstIdMap, HirFileId, SpanMap}; +use hir_expand::{ast_id_map::AstIdMap, span::SpanMapRef, HirFileId}; use syntax::ast::{self, HasModuleItem, HasTypeBounds}; use crate::{ @@ -23,7 +22,6 @@ pub(super) struct Ctx<'a> { tree: ItemTree, source_ast_id_map: Arc, body_ctx: crate::lower::LowerCtx<'a>, - file: HirFileId, } impl<'a> Ctx<'a> { @@ -33,11 +31,10 @@ impl<'a> Ctx<'a> { tree: ItemTree::default(), source_ast_id_map: db.ast_id_map(file), body_ctx: crate::lower::LowerCtx::with_file_id(db, file), - file, } } - pub(super) fn span_map(&self) -> &SpanMap { + pub(super) fn span_map(&self) -> SpanMapRef<'_> { self.body_ctx.span_map() } @@ -81,18 +78,9 @@ impl<'a> Ctx<'a> { } pub(super) fn lower_block(mut self, block: &ast::BlockExpr) -> ItemTree { - self.tree.attrs.insert( - AttrOwner::TopLevel, - RawAttrs::new( - self.db.upcast(), - SpanAnchor { - file_id: self.file, - ast_id: self.source_ast_id_map.ast_id(block).erase(), - }, - block, - self.span_map(), - ), - ); + self.tree + .attrs + .insert(AttrOwner::TopLevel, RawAttrs::new(self.db.upcast(), block, self.span_map())); self.tree.top_level = block .statements() .filter_map(|stmt| match stmt { @@ -141,12 +129,7 @@ impl<'a> Ctx<'a> { ast::Item::MacroDef(ast) => self.lower_macro_def(ast)?.into(), ast::Item::ExternBlock(ast) => self.lower_extern_block(ast).into(), }; - let attrs = RawAttrs::new( - self.db.upcast(), - SpanAnchor { file_id: self.file, ast_id: mod_item.ast_id(&self.tree).erase() }, - item, - self.span_map(), - ); + let attrs = RawAttrs::new(self.db.upcast(), item, self.span_map()); self.add_attrs(mod_item.into(), attrs); Some(mod_item) @@ -170,12 +153,7 @@ impl<'a> Ctx<'a> { ast::AssocItem::Const(ast) => Some(self.lower_const(ast).into()), ast::AssocItem::MacroCall(ast) => self.lower_macro_call(ast).map(Into::into), }?; - let attrs = RawAttrs::new( - self.db.upcast(), - SpanAnchor { file_id: self.file, ast_id: item.ast_id(&self.tree).erase() }, - item_node, - self.span_map(), - ); + let attrs = RawAttrs::new(self.db.upcast(), item_node, self.span_map()); self.add_attrs( match item { AssocItem::Function(it) => AttrOwner::ModItem(ModItem::Function(it)), @@ -192,7 +170,7 @@ impl<'a> Ctx<'a> { let visibility = self.lower_visibility(strukt); let name = strukt.name()?.as_name(); let ast_id = self.source_ast_id_map.ast_id(strukt); - let generic_params = self.lower_generic_params(HasImplicitSelf::No, strukt, ast_id.erase()); + let generic_params = self.lower_generic_params(HasImplicitSelf::No, strukt); let fields = self.lower_fields(&strukt.kind()); let res = Struct { name, visibility, generic_params, fields, ast_id }; Some(id(self.data().structs.alloc(res))) @@ -216,19 +194,10 @@ impl<'a> Ctx<'a> { let start = self.next_field_idx(); for field in fields.fields() { if let Some(data) = self.lower_record_field(&field) { - let 
ast_id = match data.ast_id { - FieldAstId::Record(it) => it.erase(), - FieldAstId::Tuple(it) => it.erase(), - }; let idx = self.data().fields.alloc(data); self.add_attrs( idx.into(), - RawAttrs::new( - self.db.upcast(), - SpanAnchor { file_id: self.file, ast_id }, - &field, - self.span_map(), - ), + RawAttrs::new(self.db.upcast(), &field, self.span_map()), ); } } @@ -249,20 +218,8 @@ impl<'a> Ctx<'a> { let start = self.next_field_idx(); for (i, field) in fields.fields().enumerate() { let data = self.lower_tuple_field(i, &field); - let ast_id = match data.ast_id { - FieldAstId::Record(it) => it.erase(), - FieldAstId::Tuple(it) => it.erase(), - }; let idx = self.data().fields.alloc(data); - self.add_attrs( - idx.into(), - RawAttrs::new( - self.db.upcast(), - SpanAnchor { file_id: self.file, ast_id }, - &field, - self.span_map(), - ), - ); + self.add_attrs(idx.into(), RawAttrs::new(self.db.upcast(), &field, self.span_map())); } let end = self.next_field_idx(); IdxRange::new(start..end) @@ -280,7 +237,7 @@ impl<'a> Ctx<'a> { let visibility = self.lower_visibility(union); let name = union.name()?.as_name(); let ast_id = self.source_ast_id_map.ast_id(union); - let generic_params = self.lower_generic_params(HasImplicitSelf::No, union, ast_id.erase()); + let generic_params = self.lower_generic_params(HasImplicitSelf::No, union); let fields = match union.record_field_list() { Some(record_field_list) => self.lower_fields(&StructKind::Record(record_field_list)), None => Fields::Record(IdxRange::new(self.next_field_idx()..self.next_field_idx())), @@ -293,7 +250,7 @@ impl<'a> Ctx<'a> { let visibility = self.lower_visibility(enum_); let name = enum_.name()?.as_name(); let ast_id = self.source_ast_id_map.ast_id(enum_); - let generic_params = self.lower_generic_params(HasImplicitSelf::No, enum_, ast_id.erase()); + let generic_params = self.lower_generic_params(HasImplicitSelf::No, enum_); let variants = match &enum_.variant_list() { Some(variant_list) => self.lower_variants(variant_list), None => IdxRange::new(self.next_variant_idx()..self.next_variant_idx()), @@ -306,16 +263,10 @@ impl<'a> Ctx<'a> { let start = self.next_variant_idx(); for variant in variants.variants() { if let Some(data) = self.lower_variant(&variant) { - let ast_id = data.ast_id.erase(); let idx = self.data().variants.alloc(data); self.add_attrs( idx.into(), - RawAttrs::new( - self.db.upcast(), - SpanAnchor { file_id: self.file, ast_id }, - &variant, - self.span_map(), - ), + RawAttrs::new(self.db.upcast(), &variant, self.span_map()), ); } } @@ -366,12 +317,7 @@ impl<'a> Ctx<'a> { }); self.add_attrs( idx.into(), - RawAttrs::new( - self.db.upcast(), - SpanAnchor { file_id: self.file, ast_id: ast_id.erase() }, - &self_param, - self.span_map(), - ), + RawAttrs::new(self.db.upcast(), &self_param, self.span_map()), ); has_self_param = true; } @@ -392,12 +338,7 @@ impl<'a> Ctx<'a> { }; self.add_attrs( idx.into(), - RawAttrs::new( - self.db.upcast(), - SpanAnchor { file_id: self.file, ast_id: ast_id.erase() }, - ¶m, - self.span_map(), - ), + RawAttrs::new(self.db.upcast(), ¶m, self.span_map()), ); } } @@ -455,8 +396,7 @@ impl<'a> Ctx<'a> { ast_id, flags, }; - res.explicit_generic_params = - self.lower_generic_params(HasImplicitSelf::No, func, ast_id.erase()); + res.explicit_generic_params = self.lower_generic_params(HasImplicitSelf::No, func); Some(id(self.data().functions.alloc(res))) } @@ -470,8 +410,7 @@ impl<'a> Ctx<'a> { let visibility = self.lower_visibility(type_alias); let bounds = self.lower_type_bounds(type_alias); let ast_id = 
self.source_ast_id_map.ast_id(type_alias); - let generic_params = - self.lower_generic_params(HasImplicitSelf::No, type_alias, ast_id.erase()); + let generic_params = self.lower_generic_params(HasImplicitSelf::No, type_alias); let res = TypeAlias { name, visibility, bounds, generic_params, type_ref, ast_id }; Some(id(self.data().type_aliases.alloc(res))) } @@ -520,11 +459,8 @@ impl<'a> Ctx<'a> { let name = trait_def.name()?.as_name(); let visibility = self.lower_visibility(trait_def); let ast_id = self.source_ast_id_map.ast_id(trait_def); - let generic_params = self.lower_generic_params( - HasImplicitSelf::Yes(trait_def.type_bound_list()), - trait_def, - ast_id.erase(), - ); + let generic_params = + self.lower_generic_params(HasImplicitSelf::Yes(trait_def.type_bound_list()), trait_def); let is_auto = trait_def.auto_token().is_some(); let is_unsafe = trait_def.unsafe_token().is_some(); @@ -549,7 +485,6 @@ impl<'a> Ctx<'a> { let generic_params = self.lower_generic_params( HasImplicitSelf::Yes(trait_alias_def.type_bound_list()), trait_alias_def, - ast_id.erase(), ); let alias = TraitAlias { name, visibility, generic_params, ast_id }; @@ -560,8 +495,7 @@ impl<'a> Ctx<'a> { let ast_id = self.source_ast_id_map.ast_id(impl_def); // Note that trait impls don't get implicit `Self` unlike traits, because here they are a // type alias rather than a type parameter, so this is handled by the resolver. - let generic_params = - self.lower_generic_params(HasImplicitSelf::No, impl_def, ast_id.erase()); + let generic_params = self.lower_generic_params(HasImplicitSelf::No, impl_def); // FIXME: If trait lowering fails, due to a non PathType for example, we treat this impl // as if it was an non-trait impl. Ideally we want to create a unique missing ref that only // equals itself. @@ -615,9 +549,7 @@ impl<'a> Ctx<'a> { path, ast_id, expand_to, - call_site: span_map - .span_for_range(m.syntax().text_range()) - .map_or(SyntaxContextId::ROOT, |s| s.ctx), + call_site: span_map.span_for_range(m.syntax().text_range()).ctx, }; Some(id(self.data().macro_calls.alloc(res))) } @@ -656,15 +588,7 @@ impl<'a> Ctx<'a> { ast::ExternItem::TypeAlias(ty) => self.lower_type_alias(ty)?.into(), ast::ExternItem::MacroCall(call) => self.lower_macro_call(call)?.into(), }; - let attrs = RawAttrs::new( - self.db.upcast(), - SpanAnchor { - file_id: self.file, - ast_id: mod_item.ast_id(&self.tree).erase(), - }, - &item, - self.span_map(), - ); + let attrs = RawAttrs::new(self.db.upcast(), &item, self.span_map()); self.add_attrs(mod_item.into(), attrs); Some(mod_item) }) @@ -679,7 +603,6 @@ impl<'a> Ctx<'a> { &mut self, has_implicit_self: HasImplicitSelf, node: &dyn ast::HasGenericParams, - owner_ast_id: ErasedFileAstId, ) -> Interned { let mut generics = GenericParams::default(); @@ -701,12 +624,7 @@ impl<'a> Ctx<'a> { let add_param_attrs = |item: Either, param| { - let attrs = RawAttrs::new( - self.db.upcast(), - SpanAnchor { file_id: self.file, ast_id: owner_ast_id }, - ¶m, - self.body_ctx.span_map(), - ); + let attrs = RawAttrs::new(self.db.upcast(), ¶m, self.body_ctx.span_map()); // This is identical to the body of `Ctx::add_attrs()` but we can't call that here // because it requires `&mut self` and the call to `generics.fill()` below also // references `self`. 
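Two mechanical changes repeat through these lowering hunks and are worth spelling out once. First, attribute collection no longer builds a `SpanAnchor` by hand; the anchor is implicit in the span map. Second, `span_for_range` has become total, so the old `map_or(SyntaxContextId::ROOT, |s| s.ctx)` fallback disappears. A hedged sketch of the resulting call-site shape, assuming the crate-local names used in this diff (`call_site_ctx` itself is hypothetical):

use syntax::TextRange;

fn call_site_ctx(span_map: SpanMapRef<'_>, range: TextRange) -> SyntaxContextId {
    // before this patch:
    //   span_map.span_for_range(range).map_or(SyntaxContextId::ROOT, |s| s.ctx)
    // after it, the lookup cannot fail, real files included:
    span_map.span_for_range(range).ctx
}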
@@ -817,7 +735,7 @@ fn lower_abi(abi: ast::Abi) -> Interned { struct UseTreeLowering<'a> { db: &'a dyn DefDatabase, - hygiene: &'a SpanMap, + hygiene: SpanMapRef<'a>, mapping: Arena, } @@ -885,7 +803,7 @@ impl UseTreeLowering<'_> { pub(crate) fn lower_use_tree( db: &dyn DefDatabase, - hygiene: &SpanMap, + hygiene: SpanMapRef<'_>, tree: ast::UseTree, ) -> Option<(UseTree, Arena)> { let mut lowering = UseTreeLowering { db, hygiene, mapping: Arena::new() }; diff --git a/crates/hir-def/src/lib.rs b/crates/hir-def/src/lib.rs index 65f1dcc850198..f9374347c264d 100644 --- a/crates/hir-def/src/lib.rs +++ b/crates/hir-def/src/lib.rs @@ -75,7 +75,7 @@ use hir_expand::{ name::Name, proc_macro::ProcMacroExpander, AstId, ExpandError, ExpandResult, ExpandTo, HirFileId, InFile, MacroCallId, MacroCallKind, - MacroDefId, MacroDefKind, UnresolvedMacro, + MacroDefId, MacroDefKind, }; use item_tree::ExternBlock; use la_arena::Idx; @@ -1166,15 +1166,14 @@ impl AsMacroCall for InFile<&ast::MacroCall> { let expands_to = hir_expand::ExpandTo::from_call_site(self.value); let ast_id = AstId::new(self.file_id, db.ast_id_map(self.file_id).ast_id(self.value)); let span_map = db.span_map(self.file_id); - let path = self.value.path().and_then(|path| path::ModPath::from_src(db, path, &span_map)); + let path = + self.value.path().and_then(|path| path::ModPath::from_src(db, path, span_map.as_ref())); let Some(path) = path else { return Ok(ExpandResult::only_err(ExpandError::other("malformed macro invocation"))); }; - let call_site = span_map - .span_for_range(self.value.syntax().text_range()) - .map_or(SyntaxContextId::ROOT, |s| s.ctx); + let call_site = span_map.span_for_range(self.value.syntax().text_range()).ctx; macro_call_as_call_id_with_eager( db, @@ -1228,7 +1227,7 @@ fn macro_call_as_call_id_with_eager( let res = match def.kind { MacroDefKind::BuiltInEager(..) => { let macro_call = InFile::new(call.ast_id.file_id, call.ast_id.to_node(db)); - expand_eager_macro_input(db, krate, macro_call, def, &|path| { + expand_eager_macro_input(db, krate, macro_call, def, call_site, &|path| { eager_resolver(path).filter(MacroDefId::is_fn_like) }) } @@ -1370,6 +1369,12 @@ fn attr_macro_as_call_id( macro_attr.ctxt, ) } + +#[derive(Debug)] +pub struct UnresolvedMacro { + pub path: hir_expand::mod_path::ModPath, +} + intern::impl_internable!( crate::type_ref::TypeRef, crate::type_ref::TraitRef, diff --git a/crates/hir-def/src/lower.rs b/crates/hir-def/src/lower.rs index 28a652a60a768..a5c22898245d3 100644 --- a/crates/hir-def/src/lower.rs +++ b/crates/hir-def/src/lower.rs @@ -3,7 +3,8 @@ use std::cell::OnceCell; use hir_expand::{ ast_id_map::{AstIdMap, AstIdNode}, - AstId, HirFileId, InFile, SpanMap, + span::{SpanMap, SpanMapRef}, + AstId, HirFileId, InFile, }; use syntax::ast; use triomphe::Arc; @@ -12,13 +13,13 @@ use crate::{db::DefDatabase, path::Path}; pub struct LowerCtx<'a> { pub db: &'a dyn DefDatabase, - hygiene: Arc, + hygiene: SpanMap, // FIXME: This optimization is probably pointless, ast id map should pretty much always exist anyways. 
ast_id_map: Option<(HirFileId, OnceCell>)>, } impl<'a> LowerCtx<'a> { - pub fn new(db: &'a dyn DefDatabase, hygiene: Arc, file_id: HirFileId) -> Self { + pub fn new(db: &'a dyn DefDatabase, hygiene: SpanMap, file_id: HirFileId) -> Self { LowerCtx { db, hygiene, ast_id_map: Some((file_id, OnceCell::new())) } } @@ -26,12 +27,12 @@ impl<'a> LowerCtx<'a> { LowerCtx { db, hygiene: db.span_map(file_id), ast_id_map: Some((file_id, OnceCell::new())) } } - pub fn with_hygiene(db: &'a dyn DefDatabase, hygiene: Arc) -> Self { + pub fn with_hygiene(db: &'a dyn DefDatabase, hygiene: SpanMap) -> Self { LowerCtx { db, hygiene, ast_id_map: None } } - pub(crate) fn span_map(&self) -> &SpanMap { - &self.hygiene + pub(crate) fn span_map(&self) -> SpanMapRef<'_> { + self.hygiene.as_ref() } pub(crate) fn lower_path(&self, ast: ast::Path) -> Option { diff --git a/crates/hir-def/src/macro_expansion_tests/mbe.rs b/crates/hir-def/src/macro_expansion_tests/mbe.rs index dcecec4e8e4e9..fc17dcde9a072 100644 --- a/crates/hir-def/src/macro_expansion_tests/mbe.rs +++ b/crates/hir-def/src/macro_expansion_tests/mbe.rs @@ -23,12 +23,9 @@ macro_rules! f { }; } -// +spans +// +spans+syntaxctxt f!(struct MyTraitMap2); "#, - // FIXME: #SpanAnchor(FileId(0), 1)@91..92\2# why is there whitespace annotated with a span - // here? Presumably because the leading `::` is getting two spans instead of one? Sounds - // liek glueing might be failing here expect![[r#" macro_rules! f { ( struct $ident:ident ) => { @@ -38,8 +35,8 @@ macro_rules! f { }; } -struct#FileId(0):1@58..64\2# MyTraitMap2#FileId(0):2@20..31\0# {#FileId(0):1@72..73\2# - map#FileId(0):1@86..89\2#:#FileId(0):1@89..90\2# #FileId(0):1@91..92\2#::#FileId(0):1@92..93\2#std#FileId(0):1@93..96\2#::#FileId(0):1@97..98\2#collections#FileId(0):1@98..109\2#::#FileId(0):1@110..111\2#HashSet#FileId(0):1@111..118\2#<#FileId(0):1@118..119\2#(#FileId(0):1@119..120\2#)#FileId(0):1@120..121\2#>#FileId(0):1@121..122\2#,#FileId(0):1@122..123\2# +struct#FileId(0):1@58..64\2# MyTraitMap2#FileId(0):2@31..42\0# {#FileId(0):1@72..73\2# + map#FileId(0):1@86..89\2#:#FileId(0):1@89..90\2# #FileId(0):1@89..90\2#::#FileId(0):1@92..93\2#std#FileId(0):1@93..96\2#::#FileId(0):1@97..98\2#collections#FileId(0):1@98..109\2#::#FileId(0):1@110..111\2#HashSet#FileId(0):1@111..118\2#<#FileId(0):1@118..119\2#(#FileId(0):1@119..120\2#)#FileId(0):1@120..121\2#>#FileId(0):1@121..122\2#,#FileId(0):1@122..123\2# }#FileId(0):1@132..133\2# "#]], ); @@ -51,14 +48,14 @@ fn token_mapping_floats() { // (and related issues) check( r#" -// +spans +// +spans+syntaxctxt macro_rules! f { ($($tt:tt)*) => { $($tt)* }; } -// +spans +// +spans+syntaxctxt f! { fn main() { 1; @@ -71,19 +68,19 @@ f! { "#, expect![[r#" -// +spans +// +spans+syntaxctxt macro_rules! 
f { ($($tt:tt)*) => { $($tt)* }; } -fn#FileId(0):2@19..21\0# main#FileId(0):2@22..26\0#(#FileId(0):2@26..27\0#)#FileId(0):2@27..28\0# {#FileId(0):2@29..30\0# - 1#FileId(0):2@39..40\0#;#FileId(0):2@40..41\0# - 1.0#FileId(0):2@50..53\0#;#FileId(0):2@53..54\0# - (#FileId(0):2@63..64\0#(#FileId(0):2@64..65\0#1#FileId(0):2@65..66\0#,#FileId(0):2@66..67\0# )#FileId(0):2@67..68\0#,#FileId(0):2@68..69\0# )#FileId(0):2@69..70\0#.#FileId(0):2@70..71\0#0#FileId(0):2@71..74\0#.#FileId(0):2@71..74\0#0#FileId(0):2@71..74\0#;#FileId(0):2@74..75\0# - let#FileId(0):2@84..87\0# x#FileId(0):2@88..89\0# =#FileId(0):2@90..91\0# 1#FileId(0):2@92..93\0#;#FileId(0):2@93..94\0# -}#FileId(0):2@99..100\0# +fn#FileId(0):2@30..32\0# main#FileId(0):2@33..37\0#(#FileId(0):2@37..38\0#)#FileId(0):2@38..39\0# {#FileId(0):2@40..41\0# + 1#FileId(0):2@50..51\0#;#FileId(0):2@51..52\0# + 1.0#FileId(0):2@61..64\0#;#FileId(0):2@64..65\0# + (#FileId(0):2@74..75\0#(#FileId(0):2@75..76\0#1#FileId(0):2@76..77\0#,#FileId(0):2@77..78\0# )#FileId(0):2@78..79\0#,#FileId(0):2@79..80\0# )#FileId(0):2@80..81\0#.#FileId(0):2@81..82\0#0#FileId(0):2@82..85\0#.#FileId(0):2@82..85\0#0#FileId(0):2@82..85\0#;#FileId(0):2@85..86\0# + let#FileId(0):2@95..98\0# x#FileId(0):2@99..100\0# =#FileId(0):2@101..102\0# 1#FileId(0):2@103..104\0#;#FileId(0):2@104..105\0# +}#FileId(0):2@110..111\0# "#]], @@ -127,7 +124,7 @@ macro_rules! identity { } fn main(foo: ()) { - format_args/*+spans*/!("{} {} {}", format_args!("{}", 0), foo, identity!(10), "bar") + format_args/*+spans+syntaxctxt*/!("{} {} {}", format_args!("{}", 0), foo, identity!(10), "bar") } "#, @@ -141,7 +138,7 @@ macro_rules! identity { } fn main(foo: ()) { - builtin#FileId(0):0@0..0\0# ##FileId(0):0@0..0\0#format_args#FileId(0):0@0..0\0# (#FileId(0):6@22..23\0#"{} {} {}"#FileId(0):6@23..33\0#,#FileId(0):6@33..34\0# format_args#FileId(0):6@35..46\0#!#FileId(0):6@46..47\0#(#FileId(0):6@47..48\0#"{}"#FileId(0):6@48..52\0#,#FileId(0):6@52..53\0# 0#FileId(0):6@54..55\0#)#FileId(0):6@55..56\0#,#FileId(0):6@56..57\0# foo#FileId(0):6@58..61\0#,#FileId(0):6@61..62\0# identity#FileId(0):6@63..71\0#!#FileId(0):6@71..72\0#(#FileId(0):6@72..73\0#10#FileId(0):6@73..75\0#)#FileId(0):6@75..76\0#,#FileId(0):6@76..77\0# "bar"#FileId(0):6@78..83\0#)#FileId(0):6@83..84\0# + builtin#FileId(0):0@0..0\0# ##FileId(0):0@0..0\0#format_args#FileId(0):0@0..0\0# (#FileId(0):3@56..57\0#"{} {} {}"#FileId(0):3@57..67\0#,#FileId(0):3@67..68\0# format_args#FileId(0):3@69..80\0#!#FileId(0):3@80..81\0#(#FileId(0):3@81..82\0#"{}"#FileId(0):3@82..86\0#,#FileId(0):3@86..87\0# 0#FileId(0):3@88..89\0#)#FileId(0):3@89..90\0#,#FileId(0):3@90..91\0# foo#FileId(0):3@92..95\0#,#FileId(0):3@95..96\0# identity#FileId(0):3@97..105\0#!#FileId(0):3@105..106\0#(#FileId(0):3@106..107\0#10#FileId(0):3@107..109\0#)#FileId(0):3@109..110\0#,#FileId(0):3@110..111\0# "bar"#FileId(0):3@112..117\0#)#FileId(0):3@117..118\0# } "##]], @@ -156,7 +153,7 @@ fn token_mapping_across_files() { #[macro_use] mod foo; -mk_struct/*+spans*/!(Foo with u32); +mk_struct/*+spans+syntaxctxt*/!(Foo with u32); //- /foo.rs macro_rules! mk_struct { ($foo:ident with $ty:ty) => { struct $foo($ty); } @@ -166,7 +163,7 @@ macro_rules! 
mk_struct { #[macro_use] mod foo; -struct#FileId(1):1@59..65\2# Foo#FileId(0):2@21..24\0#(#FileId(1):1@70..71\2#u32#FileId(0):2@30..33\0#)#FileId(1):1@74..75\2#;#FileId(1):1@75..76\2# +struct#FileId(1):1@59..65\2# Foo#FileId(0):2@32..35\0#(#FileId(1):1@70..71\2#u32#FileId(0):2@41..44\0#)#FileId(1):1@74..75\2#;#FileId(1):1@75..76\2# "#]], ); } diff --git a/crates/hir-def/src/macro_expansion_tests/mod.rs b/crates/hir-def/src/macro_expansion_tests/mod.rs index f770d2832efb7..27ec63d171b07 100644 --- a/crates/hir-def/src/macro_expansion_tests/mod.rs +++ b/crates/hir-def/src/macro_expansion_tests/mod.rs @@ -18,7 +18,7 @@ use std::{iter, ops::Range, sync}; use base_db::{fixture::WithFixture, ProcMacro, SourceDatabase}; use expect_test::Expect; -use hir_expand::{db::ExpandDatabase, HirFileIdExt, InFile, MacroFile, SpanMap}; +use hir_expand::{db::ExpandDatabase, span::SpanMapRef, HirFileIdExt, InFile, MacroFile}; use stdx::format_to; use syntax::{ ast::{self, edit::IndentLevel}, @@ -104,10 +104,12 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream let mut tree = false; let mut expect_errors = false; let mut show_spans = false; + let mut show_ctxt = false; for comment in call.syntax().children_with_tokens().filter(|it| it.kind() == COMMENT) { tree |= comment.to_string().contains("+tree"); expect_errors |= comment.to_string().contains("+errors"); show_spans |= comment.to_string().contains("+spans"); + show_ctxt |= comment.to_string().contains("+syntaxctxt"); } let mut expn_text = String::new(); @@ -128,8 +130,12 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream parse.syntax_node(), ); } - let pp = - pretty_print_macro_expansion(parse.syntax_node(), show_spans.then_some(&*token_map)); + let pp = pretty_print_macro_expansion( + parse.syntax_node(), + SpanMapRef::ExpansionSpanMap(&token_map), + show_spans, + show_ctxt, + ); let indent = IndentLevel::from_node(call.syntax()); let pp = reindent(indent, pp); format_to!(expn_text, "{}", pp); @@ -169,12 +175,16 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream if src.file_id.is_attr_macro(&db) || src.file_id.is_custom_derive(&db) { let call = src.file_id.call_node(&db).expect("macro file"); let mut show_spans = false; + let mut show_ctxt = false; for comment in call.value.children_with_tokens().filter(|it| it.kind() == COMMENT) { show_spans |= comment.to_string().contains("+spans"); + show_ctxt |= comment.to_string().contains("+syntaxctxt"); } let pp = pretty_print_macro_expansion( src.value, - show_spans.then_some(&db.span_map(src.file_id)), + db.span_map(src.file_id).as_ref(), + show_spans, + show_ctxt, ); format_to!(expanded_text, "\n{}", pp) } @@ -184,7 +194,12 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream for impl_id in def_map[local_id].scope.impls() { let src = impl_id.lookup(&db).source(&db); if src.file_id.is_builtin_derive(&db) { - let pp = pretty_print_macro_expansion(src.value.syntax().clone(), None); + let pp = pretty_print_macro_expansion( + src.value.syntax().clone(), + db.span_map(src.file_id).as_ref(), + false, + false, + ); format_to!(expanded_text, "\n{}", pp) } } @@ -209,7 +224,12 @@ fn reindent(indent: IndentLevel, pp: String) -> String { res } -fn pretty_print_macro_expansion(expn: SyntaxNode, map: Option<&SpanMap>) -> String { +fn pretty_print_macro_expansion( + expn: SyntaxNode, + map: SpanMapRef<'_>, + show_spans: bool, + show_ctxt: bool, +) -> String { let mut res = String::new(); let mut 
prev_kind = EOF; let mut indent_level = 0; @@ -255,17 +275,22 @@ fn pretty_print_macro_expansion(expn: SyntaxNode, map: Option<&SpanMap>) -> Stri } prev_kind = curr_kind; format_to!(res, "{}", token); - if let Some(map) = map { - if let Some(span) = map.span_for_range(token.text_range()) { + if show_spans || show_ctxt { + let span = map.span_for_range(token.text_range()); + format_to!(res, "#"); + if show_spans { format_to!( res, - "#{:?}:{:?}@{:?}\\{}#", + "{:?}:{:?}@{:?}", span.anchor.file_id, span.anchor.ast_id.into_raw(), span.range, - span.ctx ); } + if show_ctxt { + format_to!(res, "\\{}", span.ctx); + } + format_to!(res, "#"); } } res diff --git a/crates/hir-def/src/macro_expansion_tests/proc_macros.rs b/crates/hir-def/src/macro_expansion_tests/proc_macros.rs index 29374945f647a..548f22499b315 100644 --- a/crates/hir-def/src/macro_expansion_tests/proc_macros.rs +++ b/crates/hir-def/src/macro_expansion_tests/proc_macros.rs @@ -74,6 +74,7 @@ fn foo() { } #[test] +#[ignore] // TODO fn attribute_macro_syntax_completion_2() { // common case of dot completion while typing check( @@ -168,21 +169,21 @@ fn float_attribute_mapping() { check( r#" //- proc_macros: identity -//+spans +//+spans+syntaxctxt #[proc_macros::identity] fn foo(&self) { self.0. 1; } "#, expect![[r#" -//+spans +//+spans+syntaxctxt #[proc_macros::identity] fn foo(&self) { self.0. 1; } -fn#FileId(0):1@34..36\0# foo#FileId(0):1@37..40\0#(#FileId(0):1@40..41\0#&#FileId(0):1@41..42\0#self#FileId(0):1@42..46\0# )#FileId(0):1@46..47\0# {#FileId(0):1@48..49\0# - self#FileId(0):1@54..58\0# .#FileId(0):1@58..59\0#0#FileId(0):1@59..60\0#.#FileId(0):1@60..61\0#1#FileId(0):1@62..63\0#;#FileId(0):1@63..64\0# -}#FileId(0):1@65..66\0#"#]], +fn#FileId(0):1@45..47\0# foo#FileId(0):1@48..51\0#(#FileId(0):1@51..52\0#&#FileId(0):1@52..53\0#self#FileId(0):1@53..57\0# )#FileId(0):1@57..58\0# {#FileId(0):1@59..60\0# + self#FileId(0):1@65..69\0# .#FileId(0):1@69..70\0#0#FileId(0):1@70..71\0#.#FileId(0):1@71..72\0#1#FileId(0):1@73..74\0#;#FileId(0):1@74..75\0# +}#FileId(0):1@76..77\0#"#]], ); } diff --git a/crates/hir-def/src/nameres/collector.rs b/crates/hir-def/src/nameres/collector.rs index fef13604225a9..599010e542ca1 100644 --- a/crates/hir-def/src/nameres/collector.rs +++ b/crates/hir-def/src/nameres/collector.rs @@ -1219,7 +1219,7 @@ impl DefCollector<'_> { }; if matches!( def, - MacroDefId { kind:MacroDefKind::BuiltInAttr(expander, _),.. } + MacroDefId { kind: MacroDefKind::BuiltInAttr(expander, _),.. 
} if expander.is_derive() ) { // Resolved to `#[derive]` diff --git a/crates/hir-def/src/path/lower.rs b/crates/hir-def/src/path/lower.rs index ee49dfa44c80c..9b499c5619efc 100644 --- a/crates/hir-def/src/path/lower.rs +++ b/crates/hir-def/src/path/lower.rs @@ -4,7 +4,6 @@ use std::iter; use crate::{lower::LowerCtx, type_ref::ConstRef}; -use base_db::span::SyntaxContextId; use hir_expand::{ mod_path::resolve_crate_root, name::{name, AsName}, @@ -40,11 +39,11 @@ pub(super) fn lower_path(mut path: ast::Path, ctx: &LowerCtx<'_>) -> Option) -> Option PathKind::DollarCrate(crate_root), + None => PathKind::Crate, } } } diff --git a/crates/hir-def/src/visibility.rs b/crates/hir-def/src/visibility.rs index b341b8cfbd961..2bf02b49c2474 100644 --- a/crates/hir-def/src/visibility.rs +++ b/crates/hir-def/src/visibility.rs @@ -2,7 +2,7 @@ use std::iter; -use hir_expand::{InFile, SpanMap}; +use hir_expand::{span::SpanMapRef, InFile}; use la_arena::ArenaMap; use syntax::ast; use triomphe::Arc; @@ -34,13 +34,13 @@ impl RawVisibility { db: &dyn DefDatabase, node: InFile>, ) -> RawVisibility { - Self::from_ast_with_hygiene(db, node.value, &db.span_map(node.file_id)) + Self::from_ast_with_hygiene(db, node.value, db.span_map(node.file_id).as_ref()) } pub(crate) fn from_ast_with_hygiene( db: &dyn DefDatabase, node: Option, - hygiene: &SpanMap, + hygiene: SpanMapRef<'_>, ) -> RawVisibility { Self::from_ast_with_hygiene_and_default(db, node, RawVisibility::private(), hygiene) } @@ -49,7 +49,7 @@ impl RawVisibility { db: &dyn DefDatabase, node: Option, default: RawVisibility, - hygiene: &SpanMap, + hygiene: SpanMapRef<'_>, ) -> RawVisibility { let node = match node { None => return default, diff --git a/crates/hir-expand/src/ast_id_map.rs b/crates/hir-expand/src/ast_id_map.rs index eb43ae37e08a6..2d24496ab7013 100644 --- a/crates/hir-expand/src/ast_id_map.rs +++ b/crates/hir-expand/src/ast_id_map.rs @@ -19,6 +19,33 @@ use syntax::{ast, AstNode, AstPtr, SyntaxNode, SyntaxNodePtr}; pub use base_db::span::ErasedFileAstId; +use crate::db; + +/// `AstId` points to an AST node in any file. +/// +/// It is stable across reparses, and can be used as salsa key/value. +pub type AstId = crate::InFile>; + +impl AstId { + pub fn to_node(&self, db: &dyn db::ExpandDatabase) -> N { + self.to_ptr(db).to_node(&db.parse_or_expand(self.file_id)) + } + pub fn to_in_file_node(&self, db: &dyn db::ExpandDatabase) -> crate::InFile { + crate::InFile::new(self.file_id, self.to_ptr(db).to_node(&db.parse_or_expand(self.file_id))) + } + pub fn to_ptr(&self, db: &dyn db::ExpandDatabase) -> AstPtr { + db.ast_id_map(self.file_id).get(self.value) + } +} + +pub type ErasedAstId = crate::InFile; + +impl ErasedAstId { + pub fn to_ptr(&self, db: &dyn db::ExpandDatabase) -> SyntaxNodePtr { + db.ast_id_map(self.file_id).get_erased(self.value) + } +} + /// `AstId` points to an AST node in a specific file. 
pub struct FileAstId { raw: ErasedFileAstId, @@ -141,9 +168,9 @@ impl AstIdMap { bdfs(node, |it| { if should_alloc_id(it.kind()) { res.alloc(&it); - true + TreeOrder::BreadthFirst } else { - false + TreeOrder::DepthFirst } }); res.map = hashbrown::HashMap::with_capacity_and_hasher(res.arena.len(), ()); @@ -174,7 +201,7 @@ impl AstIdMap { AstPtr::try_from_raw(self.arena[id.raw].clone()).unwrap() } - pub fn get_raw(&self, id: ErasedFileAstId) -> SyntaxNodePtr { + pub fn get_erased(&self, id: ErasedFileAstId) -> SyntaxNodePtr { self.arena[id].clone() } @@ -202,14 +229,20 @@ fn hash_ptr(ptr: &SyntaxNodePtr) -> u64 { hasher.finish() } +#[derive(Copy, Clone, PartialEq, Eq)] +enum TreeOrder { + BreadthFirst, + DepthFirst, +} + /// Walks the subtree in bdfs order, calling `f` for each node. What is bdfs /// order? It is a mix of breadth-first and depth first orders. Nodes for which -/// `f` returns true are visited breadth-first, all the other nodes are explored -/// depth-first. +/// `f` returns [`TreeOrder::BreadthFirst`] are visited breadth-first, all the other nodes are explored +/// [`TreeOrder::DepthFirst`]. /// /// In other words, the size of the bfs queue is bound by the number of "true" /// nodes. -fn bdfs(node: &SyntaxNode, mut f: impl FnMut(SyntaxNode) -> bool) { +fn bdfs(node: &SyntaxNode, mut f: impl FnMut(SyntaxNode) -> TreeOrder) { let mut curr_layer = vec![node.clone()]; let mut next_layer = vec![]; while !curr_layer.is_empty() { @@ -218,7 +251,7 @@ fn bdfs(node: &SyntaxNode, mut f: impl FnMut(SyntaxNode) -> bool) { while let Some(event) = preorder.next() { match event { syntax::WalkEvent::Enter(node) => { - if f(node.clone()) { + if f(node.clone()) == TreeOrder::BreadthFirst { next_layer.extend(node.children()); preorder.skip_subtree(); } diff --git a/crates/hir-expand/src/attrs.rs b/crates/hir-expand/src/attrs.rs index 76c787721b55a..c4937ae08b1e1 100644 --- a/crates/hir-expand/src/attrs.rs +++ b/crates/hir-expand/src/attrs.rs @@ -1,11 +1,7 @@ //! A higher level attributes based on TokenTree, with also some shortcuts. use std::{fmt, ops}; -use ::tt::SpanAnchor as _; -use base_db::{ - span::{SpanAnchor, SyntaxContextId}, - CrateId, -}; +use base_db::{span::SyntaxContextId, CrateId}; use cfg::CfgExpr; use either::Either; use intern::Interned; @@ -17,8 +13,9 @@ use triomphe::Arc; use crate::{ db::ExpandDatabase, mod_path::ModPath, + span::SpanMapRef, tt::{self, Subtree}, - InFile, SpanMap, + InFile, }; /// Syntactical attributes, without filtering of `cfg_attr`s. 
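Since the `TreeOrder` rework in the `AstIdMap` hunk above is easy to misread: a node whose callback returns `BreadthFirst` has its children deferred to the next queue layer, while `DepthFirst` subtrees are consumed inline, which keeps the queue bounded by the number of `BreadthFirst` nodes. A self-contained toy over a plain tree that mirrors the contract (the real implementation walks a `SyntaxNode` preorder instead):

#[derive(Copy, Clone, PartialEq, Eq)]
enum TreeOrder {
    BreadthFirst,
    DepthFirst,
}

struct Node {
    children: Vec<Node>,
}

fn bdfs(root: &Node, mut f: impl FnMut(&Node) -> TreeOrder) {
    let mut curr_layer = vec![root];
    let mut next_layer = Vec::new();
    while !curr_layer.is_empty() {
        for n in curr_layer.drain(..) {
            let mut stack = vec![n];
            while let Some(n) = stack.pop() {
                if f(n) == TreeOrder::BreadthFirst {
                    // children form the next breadth-first layer
                    next_layer.extend(n.children.iter());
                } else {
                    // keep exploring this subtree right away
                    stack.extend(n.children.iter());
                }
            }
        }
        std::mem::swap(&mut curr_layer, &mut next_layer);
    }
}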
@@ -44,22 +41,19 @@ impl RawAttrs { pub fn new( db: &dyn ExpandDatabase, - span_anchor: SpanAnchor, owner: &dyn ast::HasAttrs, - hygiene: &SpanMap, + hygiene: SpanMapRef<'_>, ) -> Self { let entries = collect_attrs(owner) .filter_map(|(id, attr)| match attr { Either::Left(attr) => { - attr.meta().and_then(|meta| Attr::from_src(db, span_anchor, meta, hygiene, id)) + attr.meta().and_then(|meta| Attr::from_src(db, meta, hygiene, id)) } Either::Right(comment) => comment.doc_comment().map(|doc| Attr { id, input: Some(Interned::new(AttrInput::Literal(SmolStr::new(doc)))), path: Interned::new(ModPath::from(crate::name!(doc))), - ctxt: hygiene - .span_for_range(comment.syntax().text_range()) - .map_or(SyntaxContextId::ROOT, |s| s.ctx), + ctxt: hygiene.span_for_range(comment.syntax().text_range()).ctx, }), }) .collect::>(); @@ -71,10 +65,10 @@ impl RawAttrs { pub fn from_attrs_owner( db: &dyn ExpandDatabase, - span_anchor: SpanAnchor, owner: InFile<&dyn ast::HasAttrs>, + hygiene: SpanMapRef<'_>, ) -> Self { - Self::new(db, span_anchor, owner.value, &db.span_map(owner.file_id)) + Self::new(db, owner.value, hygiene) } pub fn merge(&self, other: Self) -> Self { @@ -221,9 +215,8 @@ impl fmt::Display for AttrInput { impl Attr { fn from_src( db: &dyn ExpandDatabase, - span_anchor: SpanAnchor, ast: ast::Meta, - hygiene: &SpanMap, + hygiene: SpanMapRef<'_>, id: AttrId, ) -> Option { let path = Interned::new(ModPath::from_src(db, ast.path()?, hygiene)?); @@ -234,31 +227,20 @@ impl Attr { }; Some(Interned::new(AttrInput::Literal(value))) } else if let Some(tt) = ast.token_tree() { - // FIXME: We could also allocate ids for attributes and use the attribute itself as an anchor - let offset = - db.ast_id_map(span_anchor.file_id).get_raw(span_anchor.ast_id).text_range().start(); - let tree = syntax_node_to_token_tree(tt.syntax(), span_anchor, offset, hygiene); + let tree = syntax_node_to_token_tree(tt.syntax(), hygiene); Some(Interned::new(AttrInput::TokenTree(Box::new(tree)))) } else { None }; - Some(Attr { - id, - path, - input, - ctxt: hygiene - .span_for_range(ast.syntax().text_range()) - .map_or(SyntaxContextId::ROOT, |s| s.ctx), - }) + Some(Attr { id, path, input, ctxt: hygiene.span_for_range(ast.syntax().text_range()).ctx }) } fn from_tt(db: &dyn ExpandDatabase, tt: &tt::Subtree, id: AttrId) -> Option { // FIXME: Unecessary roundtrip tt -> ast -> tt - let (parse, _map) = mbe::token_tree_to_syntax_node(tt, mbe::TopEntryPoint::MetaItem); + let (parse, map) = mbe::token_tree_to_syntax_node(tt, mbe::TopEntryPoint::MetaItem); let ast = ast::Meta::cast(parse.syntax_node())?; - // FIXME: we discard spans here! 
- Self::from_src(db, SpanAnchor::DUMMY, ast, &SpanMap::default(), id) + Self::from_src(db, ast, SpanMapRef::ExpansionSpanMap(&map), id) } pub fn path(&self) -> &ModPath { @@ -331,7 +313,10 @@ impl Attr { return None; } let path = meta.path()?; - Some((ModPath::from_src(db, path, &span_map)?, call_site)) + Some(( + ModPath::from_src(db, path, SpanMapRef::ExpansionSpanMap(&span_map))?, + call_site, + )) }); Some(paths) diff --git a/crates/hir-expand/src/builtin_attr_macro.rs b/crates/hir-expand/src/builtin_attr_macro.rs index de8c0ac810bf7..c16b881df826d 100644 --- a/crates/hir-expand/src/builtin_attr_macro.rs +++ b/crates/hir-expand/src/builtin_attr_macro.rs @@ -79,9 +79,8 @@ fn dummy_attr_expand( /// /// As such, we expand `#[derive(Foo, bar::Bar)]` into /// ``` -/// #[Foo] -/// #[bar::Bar] -/// (); +/// #![Foo] +/// #![bar::Bar] /// ``` /// which allows fallback path resolution in hir::Semantics to properly identify our derives. /// Since we do not expand the attribute in nameres though, we keep the original item. @@ -124,12 +123,10 @@ pub fn pseudo_derive_attr_expansion( .split(|tt| matches!(tt, tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct { char: ',', .. })))) { token_trees.push(mk_leaf('#')); + token_trees.push(mk_leaf('!')); token_trees.push(mk_leaf('[')); token_trees.extend(tt.iter().cloned()); token_trees.push(mk_leaf(']')); } - token_trees.push(mk_leaf('(')); - token_trees.push(mk_leaf(')')); - token_trees.push(mk_leaf(';')); ExpandResult::ok(tt::Subtree { delimiter: tt.delimiter, token_trees }) } diff --git a/crates/hir-expand/src/builtin_derive_macro.rs b/crates/hir-expand/src/builtin_derive_macro.rs index 16cce35c13d9f..e9d137d990072 100644 --- a/crates/hir-expand/src/builtin_derive_macro.rs +++ b/crates/hir-expand/src/builtin_derive_macro.rs @@ -1,20 +1,19 @@ //! Builtin derives. use ::tt::Span; -use base_db::{CrateOrigin, LangCrateOrigin}; +use base_db::{span::SpanData, CrateOrigin, LangCrateOrigin}; use itertools::izip; use rustc_hash::FxHashSet; use stdx::never; use tracing::debug; use crate::{ + hygiene::span_with_def_site_ctxt, name::{AsName, Name}, - tt, SpanMap, -}; -use syntax::{ - ast::{self, AstNode, FieldList, HasAttrs, HasGenericParams, HasName, HasTypeBounds}, - TextSize, + span::SpanMapRef, + tt, }; +use syntax::ast::{self, AstNode, FieldList, HasAttrs, HasGenericParams, HasName, HasTypeBounds}; use crate::{db::ExpandDatabase, name, quote, ExpandError, ExpandResult, MacroCallId}; @@ -31,12 +30,15 @@ macro_rules! 
register_builtin { db: &dyn ExpandDatabase, id: MacroCallId, tt: &ast::Adt, - token_map: &SpanMap, + token_map: SpanMapRef<'_>, ) -> ExpandResult { let expander = match *self { $( BuiltinDeriveExpander::$trait => $expand, )* }; - expander(db, id, tt, token_map) + + let span = db.lookup_intern_macro_call(id).span(db); + let span = span_with_def_site_ctxt(db, span, id); + expander(db, id, span, tt, token_map) } fn find_by_name(name: &name::Name) -> Option { @@ -119,7 +121,7 @@ impl VariantShape { } } - fn from(tm: &SpanMap, value: Option) -> Result { + fn from(tm: SpanMapRef<'_>, value: Option) -> Result { let r = match value { None => VariantShape::Unit, Some(FieldList::RecordFieldList(it)) => VariantShape::Struct( @@ -191,7 +193,7 @@ struct BasicAdtInfo { associated_types: Vec, } -fn parse_adt(tm: &SpanMap, adt: &ast::Adt) -> Result { +fn parse_adt(tm: SpanMapRef<'_>, adt: &ast::Adt) -> Result { let (name, generic_param_list, shape) = match adt { ast::Adt::Struct(it) => ( it.name(), @@ -236,44 +238,21 @@ fn parse_adt(tm: &SpanMap, adt: &ast::Adt) -> Result match this { Some(it) => { param_type_set.insert(it.as_name()); - mbe::syntax_node_to_token_tree( - it.syntax(), - tm.span_for_range(it.syntax().first_token().unwrap().text_range()) - .unwrap() - .anchor, - TextSize::from(0), - tm, - ) + mbe::syntax_node_to_token_tree(it.syntax(), tm) } None => tt::Subtree::empty(), } }; let bounds = match ¶m { - ast::TypeOrConstParam::Type(it) => it.type_bound_list().map(|it| { - mbe::syntax_node_to_token_tree( - it.syntax(), - tm.span_for_range(it.syntax().first_token().unwrap().text_range()) - .unwrap() - .anchor, - TextSize::from(0), - tm, - ) - }), + ast::TypeOrConstParam::Type(it) => { + it.type_bound_list().map(|it| mbe::syntax_node_to_token_tree(it.syntax(), tm)) + } ast::TypeOrConstParam::Const(_) => None, }; let ty = if let ast::TypeOrConstParam::Const(param) = param { let ty = param .ty() - .map(|ty| { - mbe::syntax_node_to_token_tree( - ty.syntax(), - tm.span_for_range(ty.syntax().first_token().unwrap().text_range()) - .unwrap() - .anchor, - TextSize::from(0), - tm, - ) - }) + .map(|ty| mbe::syntax_node_to_token_tree(ty.syntax(), tm)) .unwrap_or_else(tt::Subtree::empty); Some(ty) } else { @@ -307,25 +286,21 @@ fn parse_adt(tm: &SpanMap, adt: &ast::Adt) -> Result let name = p.path()?.qualifier()?.as_single_name_ref()?.as_name(); param_type_set.contains(&name).then_some(p) }) - .map(|it| { - mbe::syntax_node_to_token_tree( - it.syntax(), - tm.span_for_range(it.syntax().first_token().unwrap().text_range()).unwrap().anchor, - TextSize::from(0), - tm, - ) - }) + .map(|it| mbe::syntax_node_to_token_tree(it.syntax(), tm)) .collect(); - let name_token = name_to_token(&tm, name)?; + let name_token = name_to_token(tm, name)?; Ok(BasicAdtInfo { name: name_token, shape, param_types, associated_types }) } -fn name_to_token(token_map: &SpanMap, name: Option) -> Result { +fn name_to_token( + token_map: SpanMapRef<'_>, + name: Option, +) -> Result { let name = name.ok_or_else(|| { debug!("parsed item has no name"); ExpandError::other("missing name") })?; - let span = token_map.span_for_range(name.syntax().text_range()).unwrap(); + let span = token_map.span_for_range(name.syntax().text_range()); let name_token = tt::Ident { span, text: name.text().into() }; Ok(name_token) } @@ -362,8 +337,10 @@ fn name_to_token(token_map: &SpanMap, name: Option) -> Result, trait_path: tt::Subtree, make_trait_body: impl FnOnce(&BasicAdtInfo) -> tt::Subtree, ) -> ExpandResult { @@ -423,21 +400,23 @@ fn find_builtin_crate(db: 
&dyn ExpandDatabase, id: MacroCallId) -> tt::TokenTree fn copy_expand( db: &dyn ExpandDatabase, id: MacroCallId, + span: SpanData, tt: &ast::Adt, - tm: &SpanMap, + tm: SpanMapRef<'_>, ) -> ExpandResult { let krate = find_builtin_crate(db, id); - expand_simple_derive(tt, tm, quote! { #krate::marker::Copy }, |_| quote! {}) + expand_simple_derive(span, tt, tm, quote! { #krate::marker::Copy }, |_| quote! {}) } fn clone_expand( db: &dyn ExpandDatabase, id: MacroCallId, + span: SpanData, tt: &ast::Adt, - tm: &SpanMap, + tm: SpanMapRef<'_>, ) -> ExpandResult { let krate = find_builtin_crate(db, id); - expand_simple_derive(tt, tm, quote! { #krate::clone::Clone }, |adt| { + expand_simple_derive(span, tt, tm, quote! { #krate::clone::Clone }, |adt| { if matches!(adt.shape, AdtShape::Union) { let star = tt::Punct { char: '*', spacing: ::tt::Spacing::Alone, span: tt::SpanData::DUMMY }; @@ -491,11 +470,12 @@ fn and_and() -> tt::Subtree { fn default_expand( db: &dyn ExpandDatabase, id: MacroCallId, + span: SpanData, tt: &ast::Adt, - tm: &SpanMap, + tm: SpanMapRef<'_>, ) -> ExpandResult { let krate = &find_builtin_crate(db, id); - expand_simple_derive(tt, tm, quote! { #krate::default::Default }, |adt| { + expand_simple_derive(span, tt, tm, quote! { #krate::default::Default }, |adt| { let body = match &adt.shape { AdtShape::Struct(fields) => { let name = &adt.name; @@ -531,11 +511,12 @@ fn default_expand( fn debug_expand( db: &dyn ExpandDatabase, id: MacroCallId, + span: SpanData, tt: &ast::Adt, - tm: &SpanMap, + tm: SpanMapRef<'_>, ) -> ExpandResult { let krate = &find_builtin_crate(db, id); - expand_simple_derive(tt, tm, quote! { #krate::fmt::Debug }, |adt| { + expand_simple_derive(span, tt, tm, quote! { #krate::fmt::Debug }, |adt| { let for_variant = |name: String, v: &VariantShape| match v { VariantShape::Struct(fields) => { let for_fields = fields.iter().map(|it| { @@ -609,11 +590,12 @@ fn debug_expand( fn hash_expand( db: &dyn ExpandDatabase, id: MacroCallId, + span: SpanData, tt: &ast::Adt, - tm: &SpanMap, + tm: SpanMapRef<'_>, ) -> ExpandResult { let krate = &find_builtin_crate(db, id); - expand_simple_derive(tt, tm, quote! { #krate::hash::Hash }, |adt| { + expand_simple_derive(span, tt, tm, quote! { #krate::hash::Hash }, |adt| { if matches!(adt.shape, AdtShape::Union) { // FIXME: Return expand error here return quote! {}; @@ -660,21 +642,23 @@ fn hash_expand( fn eq_expand( db: &dyn ExpandDatabase, id: MacroCallId, + span: SpanData, tt: &ast::Adt, - tm: &SpanMap, + tm: SpanMapRef<'_>, ) -> ExpandResult { let krate = find_builtin_crate(db, id); - expand_simple_derive(tt, tm, quote! { #krate::cmp::Eq }, |_| quote! {}) + expand_simple_derive(span, tt, tm, quote! { #krate::cmp::Eq }, |_| quote! {}) } fn partial_eq_expand( db: &dyn ExpandDatabase, id: MacroCallId, + span: SpanData, tt: &ast::Adt, - tm: &SpanMap, + tm: SpanMapRef<'_>, ) -> ExpandResult { let krate = find_builtin_crate(db, id); - expand_simple_derive(tt, tm, quote! { #krate::cmp::PartialEq }, |adt| { + expand_simple_derive(span, tt, tm, quote! { #krate::cmp::PartialEq }, |adt| { if matches!(adt.shape, AdtShape::Union) { // FIXME: Return expand error here return quote! {}; @@ -738,11 +722,12 @@ fn self_and_other_patterns( fn ord_expand( db: &dyn ExpandDatabase, id: MacroCallId, + span: SpanData, tt: &ast::Adt, - tm: &SpanMap, + tm: SpanMapRef<'_>, ) -> ExpandResult { let krate = &find_builtin_crate(db, id); - expand_simple_derive(tt, tm, quote! { #krate::cmp::Ord }, |adt| { + expand_simple_derive(span, tt, tm, quote! 
{ #krate::cmp::Ord }, |adt| { fn compare( krate: &tt::TokenTree, left: tt::Subtree, @@ -800,11 +785,12 @@ fn ord_expand( fn partial_ord_expand( db: &dyn ExpandDatabase, id: MacroCallId, + span: SpanData, tt: &ast::Adt, - tm: &SpanMap, + tm: SpanMapRef<'_>, ) -> ExpandResult { let krate = &find_builtin_crate(db, id); - expand_simple_derive(tt, tm, quote! { #krate::cmp::PartialOrd }, |adt| { + expand_simple_derive(span, tt, tm, quote! { #krate::cmp::PartialOrd }, |adt| { fn compare( krate: &tt::TokenTree, left: tt::Subtree, diff --git a/crates/hir-expand/src/builtin_fn_macro.rs b/crates/hir-expand/src/builtin_fn_macro.rs index 2a541a36735b6..9fb04a2f1b081 100644 --- a/crates/hir-expand/src/builtin_fn_macro.rs +++ b/crates/hir-expand/src/builtin_fn_macro.rs @@ -556,9 +556,10 @@ pub(crate) fn include_arg_to_tt( let path = parse_string(&arg.0)?; let file_id = relative_file(db, *arg_id, &path, false)?; + // why are we not going through a SyntaxNode here? let subtree = parse_to_token_tree( + SpanAnchor { file_id, ast_id: ROOT_ERASED_FILE_AST_ID }, &db.file_text(file_id), - SpanAnchor { file_id: file_id.into(), ast_id: ROOT_ERASED_FILE_AST_ID }, ) .ok_or(mbe::ExpandError::ConversionError)?; Ok((triomphe::Arc::new(subtree), file_id)) diff --git a/crates/hir-expand/src/db.rs b/crates/hir-expand/src/db.rs index e176bef78b5ad..393e391f05170 100644 --- a/crates/hir-expand/src/db.rs +++ b/crates/hir-expand/src/db.rs @@ -3,15 +3,15 @@ use ::tt::{SpanAnchor as _, SyntaxContext}; use base_db::{ salsa, - span::{SpanAnchor, SyntaxContextId, ROOT_ERASED_FILE_AST_ID}, - CrateId, Edition, SourceDatabase, + span::{SpanAnchor, SyntaxContextId}, + CrateId, Edition, FileId, SourceDatabase, }; use either::Either; use limit::Limit; -use mbe::{map_from_syntax_node, syntax_node_to_token_tree, ValueResult}; +use mbe::{syntax_node_to_token_tree, ValueResult}; use syntax::{ ast::{self, HasAttrs, HasDocComments}, - AstNode, Parse, SyntaxError, SyntaxNode, SyntaxToken, TextSize, T, + AstNode, Parse, SyntaxError, SyntaxNode, SyntaxToken, T, }; use triomphe::Arc; @@ -21,9 +21,10 @@ use crate::{ builtin_attr_macro::pseudo_derive_attr_expansion, builtin_fn_macro::EagerExpander, hygiene::{self, SyntaxContextData, Transparency}, + span::{RealSpanMap, SpanMap, SpanMapRef}, tt, AstId, BuiltinAttrExpander, BuiltinDeriveExpander, BuiltinFnLikeExpander, EagerCallInfo, - ExpandError, ExpandResult, ExpandTo, HirFileId, HirFileIdRepr, MacroCallId, MacroCallKind, - MacroCallLoc, MacroDefId, MacroDefKind, MacroFile, ProcMacroExpander, SpanMap, + ExpandError, ExpandResult, ExpandTo, ExpansionSpanMap, HirFileId, HirFileIdRepr, MacroCallId, + MacroCallKind, MacroCallLoc, MacroDefId, MacroDefKind, MacroFile, ProcMacroExpander, }; /// Total limit on the number of tokens produced by any macro invocation. @@ -102,10 +103,11 @@ pub trait ExpandDatabase: SourceDatabase { fn parse_macro_expansion( &self, macro_file: MacroFile, - ) -> ExpandResult<(Parse, Arc)>; - // FIXME: This always allocates one for non macro files which is wasteful. + ) -> ExpandResult<(Parse, Arc)>; #[salsa::transparent] - fn span_map(&self, file_id: HirFileId) -> Arc; + fn span_map(&self, file_id: HirFileId) -> SpanMap; + + fn real_span_map(&self, file_id: FileId) -> Arc; /// Macro ids. That's probably the tricksiest bit in rust-analyzer, and the /// reason why we use salsa at all. 
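Before the implementation hunks that follow, here is how a consumer is meant to combine the two queries just declared: fetch the map once for any `HirFileId`, then perform infallible range lookups on the borrowed view. The helper below is hypothetical (not part of the patch); `is_root` is the accessor added to `SyntaxContextId` in the base-db hunk at the top of this patch:

use syntax::TextRange;

fn is_from_macro(db: &dyn ExpandDatabase, file_id: HirFileId, range: TextRange) -> bool {
    // real files get a RealSpanMap, macro files reuse the expansion's map;
    // either way the lookup below always yields a span
    let map = db.span_map(file_id);
    !map.as_ref().span_for_range(range).ctx.is_root()
}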
@@ -164,13 +166,20 @@ pub trait ExpandDatabase: SourceDatabase { ) -> ExpandResult>; } -fn span_map(db: &dyn ExpandDatabase, file_id: HirFileId) -> Arc { +#[inline] +pub fn span_map(db: &dyn ExpandDatabase, file_id: HirFileId) -> SpanMap { match file_id.repr() { - HirFileIdRepr::FileId(_) => Arc::new(Default::default()), - HirFileIdRepr::MacroFile(m) => db.parse_macro_expansion(m).value.1, + HirFileIdRepr::FileId(file_id) => SpanMap::RealSpanMap(db.real_span_map(file_id)), + HirFileIdRepr::MacroFile(m) => { + SpanMap::ExpansionSpanMap(db.parse_macro_expansion(m).value.1) + } } } +pub fn real_span_map(db: &dyn ExpandDatabase, file_id: FileId) -> Arc { + Arc::new(RealSpanMap::from_file(db, file_id)) +} + /// This expands the given macro call, but with different arguments. This is /// used for completion, where we want to see what 'would happen' if we insert a /// token. The `token_to_map` mapped down into the expansion, with the mapped @@ -181,17 +190,15 @@ pub fn expand_speculative( speculative_args: &SyntaxNode, token_to_map: SyntaxToken, ) -> Option<(SyntaxNode, SyntaxToken)> { + // FIXME spanmaps let loc = db.lookup_intern_macro_call(actual_macro_call); - let file_id = loc.kind.file_id(); // Build the subtree and token mapping for the speculative args let _censor = censor_for_macro_input(&loc, speculative_args); let mut tt = mbe::syntax_node_to_token_tree( speculative_args, // we don't leak these spans into any query so its fine to make them absolute - SpanAnchor { file_id, ast_id: ROOT_ERASED_FILE_AST_ID }, - TextSize::new(0), - &Default::default(), + SpanMapRef::RealSpanMap(&RealSpanMap::empty(SpanAnchor::DUMMY.file_id)), ); let attr_arg = match loc.kind { @@ -211,9 +218,7 @@ pub fn expand_speculative( Some(token_tree) => { let mut tree = syntax_node_to_token_tree( token_tree.syntax(), - SpanAnchor { file_id, ast_id: ROOT_ERASED_FILE_AST_ID }, - TextSize::new(0), - &Default::default(), + SpanMapRef::RealSpanMap(&RealSpanMap::empty(SpanAnchor::DUMMY.file_id)), ); tree.delimiter = tt::Delimiter::UNSPECIFIED; @@ -242,12 +247,7 @@ pub fn expand_speculative( db, actual_macro_call, &adt, - &map_from_syntax_node( - speculative_args, - // we don't leak these spans into any query so its fine to make them absolute - SpanAnchor { file_id, ast_id: ROOT_ERASED_FILE_AST_ID }, - TextSize::new(0), - ), + SpanMapRef::RealSpanMap(&RealSpanMap::empty(SpanAnchor::DUMMY.file_id)), ) } MacroDefKind::Declarative(it) => { @@ -261,15 +261,13 @@ pub fn expand_speculative( }; let expand_to = macro_expand_to(db, actual_macro_call); - let (node, mut rev_tmap) = - token_tree_to_syntax_node(db, &speculative_expansion.value, expand_to); - rev_tmap.real_file = false; + let (node, rev_tmap) = token_tree_to_syntax_node(db, &speculative_expansion.value, expand_to); let syntax_node = node.syntax_node(); let token = rev_tmap .ranges_with_span(tt::SpanData { range: token_to_map.text_range(), - anchor: SpanAnchor { file_id, ast_id: ROOT_ERASED_FILE_AST_ID }, + anchor: SpanAnchor::DUMMY, ctx: SyntaxContextId::DUMMY, }) .filter_map(|range| syntax_node.covering_element(range).into_token()) @@ -310,7 +308,7 @@ fn parse_or_expand_with_err( fn parse_macro_expansion( db: &dyn ExpandDatabase, macro_file: MacroFile, -) -> ExpandResult<(Parse, Arc)> { +) -> ExpandResult<(Parse, Arc)> { let _p = profile::span("parse_macro_expansion"); let mbe::ValueResult { value: tt, err } = db.macro_expand(macro_file.macro_call_id); @@ -319,8 +317,7 @@ fn parse_macro_expansion( tracing::debug!("expanded = {}", tt.as_debug_string()); 
tracing::debug!("kind = {:?}", expand_to); - let (parse, mut rev_token_map) = token_tree_to_syntax_node(db, &tt, expand_to); - rev_token_map.real_file = false; + let (parse, rev_token_map) = token_tree_to_syntax_node(db, &tt, expand_to); ExpandResult { value: (parse, Arc::new(rev_token_map)), err } } @@ -366,18 +363,21 @@ fn macro_arg( { ValueResult::ok(Some(Arc::new(arg.0.clone()))) } else { + //FIXME: clean this up, the ast id map lookup is done twice here let (parse, map) = match loc.kind.file_id().repr() { HirFileIdRepr::FileId(file_id) => { - (db.parse(file_id).to_syntax(), Arc::new(Default::default())) + let syntax = db.parse(file_id).to_syntax(); + + (syntax, SpanMap::RealSpanMap(db.real_span_map(file_id))) } HirFileIdRepr::MacroFile(macro_file) => { let (parse, map) = db.parse_macro_expansion(macro_file).value; - (parse, map) + (parse, SpanMap::ExpansionSpanMap(map)) } }; let root = parse.syntax_node(); - let (syntax, offset, ast_id) = match loc.kind { + let syntax = match loc.kind { MacroCallKind::FnLike { ast_id, .. } => { let node = &ast_id.to_ptr(db).to_node(&root); let offset = node.syntax().text_range().start(); @@ -386,7 +386,7 @@ fn macro_arg( if let Some(e) = mismatched_delimiters(&tt) { return ValueResult::only_err(e); } - (tt, offset, ast_id.value.erase()) + tt } None => { return ValueResult::only_err(Arc::new(Box::new([ @@ -396,15 +396,9 @@ fn macro_arg( } } MacroCallKind::Derive { ast_id, .. } => { - let syntax_node = ast_id.to_ptr(db).to_node(&root).syntax().clone(); - let offset = syntax_node.text_range().start(); - (syntax_node, offset, ast_id.value.erase()) - } - MacroCallKind::Attr { ast_id, .. } => { - let syntax_node = ast_id.to_ptr(db).to_node(&root).syntax().clone(); - let offset = syntax_node.text_range().start(); - (syntax_node, offset, ast_id.value.erase()) + ast_id.to_ptr(db).to_node(&root).syntax().clone() } + MacroCallKind::Attr { ast_id, .. } => ast_id.to_ptr(db).to_node(&root).syntax().clone(), }; let censor = censor_for_macro_input(&loc, &syntax); // let mut fixups = fixup::fixup_syntax(&node); @@ -416,13 +410,8 @@ fn macro_arg( // fixups.replace, // fixups.append, // ); - let mut tt = mbe::syntax_node_to_token_tree_censored( - &syntax, - SpanAnchor { file_id: loc.kind.file_id(), ast_id }, - offset, - &map, - censor, - ); + + let mut tt = mbe::syntax_node_to_token_tree_censored(&syntax, map.as_ref(), censor); if loc.def.is_proc_macro() { // proc macros expect their inputs without parentheses, MBEs expect it with them included @@ -492,18 +481,19 @@ fn decl_macro_expander( let is_2021 = db.crate_graph()[def_crate].edition >= Edition::Edition2021; let (root, map) = match id.file_id.repr() { HirFileIdRepr::FileId(file_id) => { - (db.parse(file_id).syntax_node(), Arc::new(Default::default())) + // FIXME: Arc + // FIXME: id.to_ptr duplicated, expensive + (db.parse(file_id).syntax_node(), SpanMap::RealSpanMap(db.real_span_map(file_id))) } HirFileIdRepr::MacroFile(macro_file) => { let (parse, map) = db.parse_macro_expansion(macro_file).value; - (parse.syntax_node(), map) + (parse.syntax_node(), SpanMap::ExpansionSpanMap(map)) } }; let transparency = |node| { // ... 
would be nice to have the item tree here - let attrs = - RawAttrs::new(db, SpanAnchor::DUMMY, node, &Default::default()).filter(db, def_crate); + let attrs = RawAttrs::new(db, node, map.as_ref()).filter(db, def_crate); match &*attrs .iter() .find(|it| { @@ -526,12 +516,7 @@ fn decl_macro_expander( ast::Macro::MacroRules(macro_rules) => ( match macro_rules.token_tree() { Some(arg) => { - let tt = mbe::syntax_node_to_token_tree( - arg.syntax(), - SpanAnchor { file_id: id.file_id, ast_id: id.value.erase() }, - macro_rules.syntax().text_range().start(), - &map, - ); + let tt = mbe::syntax_node_to_token_tree(arg.syntax(), map.as_ref()); let mac = mbe::DeclarativeMacro::parse_macro_rules(&tt, is_2021); mac } @@ -545,12 +530,7 @@ fn decl_macro_expander( ast::Macro::MacroDef(macro_def) => ( match macro_def.body() { Some(arg) => { - let tt = mbe::syntax_node_to_token_tree( - arg.syntax(), - SpanAnchor { file_id: id.file_id, ast_id: id.value.erase() }, - macro_def.syntax().text_range().start(), - &map, - ); + let tt = mbe::syntax_node_to_token_tree(arg.syntax(), map.as_ref()); let mac = mbe::DeclarativeMacro::parse_macro2(&tt, is_2021); mac } @@ -591,10 +571,16 @@ fn macro_expand( // FIXME: add firewall query for this? let hir_file_id = loc.kind.file_id(); let (root, map) = match hir_file_id.repr() { - HirFileIdRepr::FileId(file_id) => (db.parse(file_id).syntax_node(), None), + HirFileIdRepr::FileId(file_id) => { + // FIXME: query for span map + ( + db.parse(file_id).syntax_node(), + SpanMap::RealSpanMap(db.real_span_map(file_id)), + ) + } HirFileIdRepr::MacroFile(macro_file) => { let (parse, map) = db.parse_macro_expansion(macro_file).value; - (parse.syntax_node(), Some(map)) + (parse.syntax_node(), SpanMap::ExpansionSpanMap(map)) } }; let MacroCallKind::Derive { ast_id, .. 
} = loc.kind else { unreachable!() }; @@ -602,23 +588,7 @@ fn macro_expand( // FIXME: we might need to remove the spans from the input to the derive macro here let _censor = censor_for_macro_input(&loc, node.syntax()); - let _t; - expander.expand( - db, - macro_call_id, - &node, - match &map { - Some(map) => map, - None => { - _t = map_from_syntax_node( - node.syntax(), - SpanAnchor { file_id: hir_file_id, ast_id: ast_id.value.erase() }, - node.syntax().text_range().start(), - ); - &_t - } - }, - ) + expander.expand(db, macro_call_id, &node, map.as_ref()) } _ => { let ValueResult { value, err } = db.macro_arg(macro_call_id); @@ -732,7 +702,7 @@ fn token_tree_to_syntax_node( db: &dyn ExpandDatabase, tt: &tt::Subtree, expand_to: ExpandTo, -) -> (Parse, SpanMap) { +) -> (Parse, ExpansionSpanMap) { let entry_point = match expand_to { ExpandTo::Statements => mbe::TopEntryPoint::MacroStmts, ExpandTo::Items => mbe::TopEntryPoint::MacroItems, @@ -741,14 +711,14 @@ fn token_tree_to_syntax_node( ExpandTo::Expr => mbe::TopEntryPoint::Expr, }; let mut tm = mbe::token_tree_to_syntax_node(tt, entry_point); - // now what the hell is going on here + // FIXME: now what the hell is going on here tm.1.span_map.sort_by(|(_, a), (_, b)| { a.anchor.file_id.cmp(&b.anchor.file_id).then_with(|| { - let map = db.ast_id_map(a.anchor.file_id); - map.get_raw(a.anchor.ast_id) + let map = db.ast_id_map(a.anchor.file_id.into()); + map.get_erased(a.anchor.ast_id) .text_range() .start() - .cmp(&map.get_raw(b.anchor.ast_id).text_range().start()) + .cmp(&map.get_erased(b.anchor.ast_id).text_range().start()) }) }); tm diff --git a/crates/hir-expand/src/eager.rs b/crates/hir-expand/src/eager.rs index dc6507a92de54..bcb5383c3f9b6 100644 --- a/crates/hir-expand/src/eager.rs +++ b/crates/hir-expand/src/eager.rs @@ -19,7 +19,7 @@ //! //! See the full discussion : use base_db::{ - span::{SpanAnchor, SyntaxContextId, ROOT_ERASED_FILE_AST_ID}, + span::{SpanAnchor, SyntaxContextId}, CrateId, }; use rustc_hash::FxHashMap; @@ -30,8 +30,9 @@ use crate::{ ast::{self, AstNode}, db::ExpandDatabase, mod_path::ModPath, - EagerCallInfo, ExpandError, ExpandResult, ExpandTo, InFile, MacroCallId, MacroCallKind, - MacroCallLoc, MacroDefId, MacroDefKind, SpanMap, + span::{RealSpanMap, SpanMapRef}, + EagerCallInfo, ExpandError, ExpandResult, ExpandTo, ExpansionSpanMap, InFile, MacroCallId, + MacroCallKind, MacroCallLoc, MacroDefId, MacroDefKind, }; pub fn expand_eager_macro_input( @@ -39,6 +40,7 @@ pub fn expand_eager_macro_input( krate: CrateId, macro_call: InFile, def: MacroDefId, + call_site: SyntaxContextId, resolver: &dyn Fn(ModPath) -> Option, ) -> ExpandResult> { let ast_map = db.ast_id_map(macro_call.file_id); @@ -55,18 +57,10 @@ pub fn expand_eager_macro_input( krate, eager: None, kind: MacroCallKind::FnLike { ast_id: call_id, expand_to: ExpandTo::Expr }, - // FIXME - call_site: SyntaxContextId::ROOT, + call_site, }); let ExpandResult { value: (arg_exp, arg_exp_map), err: parse_err } = db.parse_macro_expansion(arg_id.as_macro_file()); - // we need this map here as the expansion of the eager input fake file loses whitespace ... 
- // let mut ws_mapping = FxHashMap::default(); - // if let Some((tm)) = db.macro_arg(arg_id).value.as_deref() { - // ws_mapping.extend(tm.entries().filter_map(|(id, range)| { - // Some((arg_exp_map.first_range_by_token(id, syntax::SyntaxKind::TOMBSTONE)?, range)) - // })); - // } let ExpandResult { value: expanded_eager_input, err } = { eager_macro_recur( @@ -74,6 +68,7 @@ pub fn expand_eager_macro_input( &arg_exp_map, InFile::new(arg_id.as_file(), arg_exp.syntax_node()), krate, + call_site, resolver, ) }; @@ -83,44 +78,12 @@ pub fn expand_eager_macro_input( return ExpandResult { value: None, err }; }; + // FIXME: Spans! let mut subtree = mbe::syntax_node_to_token_tree( &expanded_eager_input, - // is this right? - SpanAnchor { file_id: arg_id.as_file(), ast_id: ROOT_ERASED_FILE_AST_ID }, - TextSize::new(0), - // FIXME: Spans! `eager_macro_recur` needs to fill out a span map for us - &Default::default(), + RealSpanMap::empty(::DUMMY.file_id), ); - // let og_tmap = if let Some(tt) = macro_call.value.token_tree() { - // let mut ids_used = FxHashSet::default(); - // let mut og_tmap = mbe::syntax_node_to_token_map(tt.syntax()); - // // The tokenmap and ids of subtree point into the expanded syntax node, but that is inaccessible from the outside - // // so we need to remap them to the original input of the eager macro. - // subtree.visit_ids(&mut |id| { - // // Note: we discard all token ids of braces and the like here, but that's not too bad and only a temporary fix - - // if let Some(range) = expanded_eager_input_token_map - // .first_range_by_token(id, syntax::SyntaxKind::TOMBSTONE) - // { - // // remap from expanded eager input to eager input expansion - // if let Some(og_range) = mapping.get(&range) { - // // remap from eager input expansion to original eager input - // if let Some(&og_range) = ws_mapping.get(og_range) { - // if let Some(og_token) = og_tmap.token_by_range(og_range) { - // ids_used.insert(og_token); - // return og_token; - // } - // } - // } - // } - // tt::TokenId::UNSPECIFIED - // }); - // og_tmap.filter(|id| ids_used.contains(&id)); - // og_tmap - // } else { - // Default::default() - // }; subtree.delimiter = crate::tt::Delimiter::UNSPECIFIED; let loc = MacroCallLoc { @@ -132,8 +95,7 @@ pub fn expand_eager_macro_input( error: err.clone(), })), kind: MacroCallKind::FnLike { ast_id: call_id, expand_to }, - // FIXME - call_site: SyntaxContextId::ROOT, + call_site, }; ExpandResult { value: Some(db.intern_macro_call(loc)), err } @@ -144,7 +106,8 @@ fn lazy_expand( def: &MacroDefId, macro_call: InFile, krate: CrateId, -) -> ExpandResult<(InFile>, Arc)> { + call_site: SyntaxContextId, +) -> ExpandResult<(InFile>, Arc)> { let ast_id = db.ast_id_map(macro_call.file_id).ast_id(¯o_call.value); let expand_to = ExpandTo::from_call_site(¯o_call.value); @@ -153,8 +116,8 @@ fn lazy_expand( db, krate, MacroCallKind::FnLike { ast_id, expand_to }, - // FIXME - SyntaxContextId::ROOT, + // FIXME: This is wrong + call_site, ); let macro_file = id.as_macro_file(); @@ -164,9 +127,10 @@ fn lazy_expand( fn eager_macro_recur( db: &dyn ExpandDatabase, - hygiene: &SpanMap, + hygiene: &ExpansionSpanMap, curr: InFile, krate: CrateId, + call_site: SyntaxContextId, macro_resolver: &dyn Fn(ModPath) -> Option, ) -> ExpandResult)>> { let original = curr.value.clone_for_update(); @@ -204,7 +168,10 @@ fn eager_macro_recur( continue; } }; - let def = match call.path().and_then(|path| ModPath::from_src(db, path, hygiene)) { + let def = match call + .path() + .and_then(|path| ModPath::from_src(db, path, 
SpanMapRef::ExpansionSpanMap(hygiene))) + { Some(path) => match macro_resolver(path.clone()) { Some(def) => def, None => { @@ -225,6 +192,8 @@ fn eager_macro_recur( krate, curr.with_value(call.clone()), def, + // FIXME: This call site is not quite right I think? We probably need to mark it? + call_site, macro_resolver, ); match value { @@ -260,7 +229,7 @@ fn eager_macro_recur( | MacroDefKind::BuiltInDerive(..) | MacroDefKind::ProcMacro(..) => { let ExpandResult { value: (parse, tm), err } = - lazy_expand(db, &def, curr.with_value(call.clone()), krate); + lazy_expand(db, &def, curr.with_value(call.clone()), krate, call_site); // replace macro inside let ExpandResult { value, err: error } = eager_macro_recur( @@ -269,6 +238,7 @@ fn eager_macro_recur( // FIXME: We discard parse errors here parse.as_ref().map(|it| it.syntax_node()), krate, + call_site, macro_resolver, ); let err = err.or(error); diff --git a/crates/hir-expand/src/files.rs b/crates/hir-expand/src/files.rs new file mode 100644 index 0000000000000..45875d949817e --- /dev/null +++ b/crates/hir-expand/src/files.rs @@ -0,0 +1,293 @@ +use std::iter; + +use base_db::{ + span::{HirFileId, HirFileIdRepr, MacroFile, SyntaxContextId}, + FileRange, +}; +use either::Either; +use syntax::{AstNode, SyntaxNode, SyntaxToken, TextRange}; + +use crate::{db, ExpansionInfo, HirFileIdExt as _}; + +// FIXME: Make an InRealFile wrapper +/// `InFile` stores a value of `T` inside a particular file/syntax tree. +/// +/// Typical usages are: +/// +/// * `InFile` -- syntax node in a file +/// * `InFile` -- ast node in a file +/// * `InFile` -- offset in a file +#[derive(Debug, PartialEq, Eq, Clone, Copy, Hash)] +pub struct InFile { + pub file_id: HirFileId, + pub value: T, +} + +impl InFile { + pub fn new(file_id: HirFileId, value: T) -> InFile { + InFile { file_id, value } + } + + pub fn with_value(&self, value: U) -> InFile { + InFile::new(self.file_id, value) + } + + pub fn map U, U>(self, f: F) -> InFile { + InFile::new(self.file_id, f(self.value)) + } + + pub fn as_ref(&self) -> InFile<&T> { + self.with_value(&self.value) + } + + pub fn file_syntax(&self, db: &dyn db::ExpandDatabase) -> SyntaxNode { + db.parse_or_expand(self.file_id) + } +} + +impl InFile<&T> { + pub fn cloned(&self) -> InFile { + self.with_value(self.value.clone()) + } +} + +impl InFile> { + pub fn transpose(self) -> Option> { + let value = self.value?; + Some(InFile::new(self.file_id, value)) + } +} + +impl InFile> { + pub fn transpose(self) -> Either, InFile> { + match self.value { + Either::Left(l) => Either::Left(InFile::new(self.file_id, l)), + Either::Right(r) => Either::Right(InFile::new(self.file_id, r)), + } + } +} + +impl InFile<&SyntaxNode> { + pub fn ancestors_with_macros( + self, + db: &dyn db::ExpandDatabase, + ) -> impl Iterator> + Clone + '_ { + iter::successors(Some(self.cloned()), move |node| match node.value.parent() { + Some(parent) => Some(node.with_value(parent)), + None => node.file_id.call_node(db), + }) + } + + /// Skips the attributed item that caused the macro invocation we are climbing up + pub fn ancestors_with_macros_skip_attr_item( + self, + db: &dyn db::ExpandDatabase, + ) -> impl Iterator> + '_ { + let succ = move |node: &InFile| match node.value.parent() { + Some(parent) => Some(node.with_value(parent)), + None => { + let parent_node = node.file_id.call_node(db)?; + if node.file_id.is_attr_macro(db) { + // macro call was an attributed item, skip it + // FIXME: does this fail if this is a direct expansion of another macro? 
+ parent_node.map(|node| node.parent()).transpose() + } else { + Some(parent_node) + } + } + }; + iter::successors(succ(&self.cloned()), succ) + } + + /// Falls back to the macro call range if the node cannot be mapped up fully. + /// + /// For attributes and derives, this will point back to the attribute only. + /// For the entire item use [`InFile::original_file_range_full`]. + pub fn original_file_range(self, db: &dyn db::ExpandDatabase) -> FileRange { + match self.file_id.repr() { + HirFileIdRepr::FileId(file_id) => FileRange { file_id, range: self.value.text_range() }, + HirFileIdRepr::MacroFile(mac_file) => { + if let Some((res, ctxt)) = + ExpansionInfo::new(db, mac_file).map_node_range_up(db, self.value.text_range()) + { + // FIXME: Figure out an API that makes proper use of ctx, this only exists to + // keep pre-token map rewrite behaviour. + if ctxt.is_root() { + return res; + } + } + // Fall back to whole macro call. + let loc = db.lookup_intern_macro_call(mac_file.macro_call_id); + loc.kind.original_call_range(db) + } + } + } + + /// Falls back to the macro call range if the node cannot be mapped up fully. + pub fn original_file_range_full(self, db: &dyn db::ExpandDatabase) -> FileRange { + match self.file_id.repr() { + HirFileIdRepr::FileId(file_id) => FileRange { file_id, range: self.value.text_range() }, + HirFileIdRepr::MacroFile(mac_file) => { + if let Some((res, ctxt)) = + ExpansionInfo::new(db, mac_file).map_node_range_up(db, self.value.text_range()) + { + // FIXME: Figure out an API that makes proper use of ctx, this only exists to + // keep pre-token map rewrite behaviour. + if ctxt.is_root() { + return res; + } + } + // Fall back to whole macro call. + let loc = db.lookup_intern_macro_call(mac_file.macro_call_id); + loc.kind.original_call_range_with_body(db) + } + } + } + + /// Attempts to map the syntax node back up its macro calls. + pub fn original_file_range_opt( + self, + db: &dyn db::ExpandDatabase, + ) -> Option<(FileRange, SyntaxContextId)> { + match self.file_id.repr() { + HirFileIdRepr::FileId(file_id) => { + Some((FileRange { file_id, range: self.value.text_range() }, SyntaxContextId::ROOT)) + } + HirFileIdRepr::MacroFile(mac_file) => { + ExpansionInfo::new(db, mac_file).map_node_range_up(db, self.value.text_range()) + } + } + } + + pub fn original_syntax_node(self, db: &dyn db::ExpandDatabase) -> Option> { + // This kind of upmapping can only be achieved in attribute expanded files, + // as we don't have node inputs otherwise and therefore can't find an `N` node in the input + let Some(file_id) = self.file_id.macro_file() else { + return Some(self.map(Clone::clone)); + }; + if !self.file_id.is_attr_macro(db) { + return None; + } + + let (FileRange { file_id, range }, ctx) = + ExpansionInfo::new(db, file_id).map_node_range_up(db, self.value.text_range())?; + + // FIXME: Figure out an API that makes proper use of ctx, this only exists to + // keep pre-token map rewrite behaviour. + if !ctx.is_root() { + return None; + } + + let anc = db.parse(file_id).syntax_node().covering_element(range); + let kind = self.value.kind(); + // FIXME: This heuristic is brittle and with the right macro may select completely unrelated nodes? 
+ let value = anc.ancestors().find(|it| it.kind() == kind)?; + Some(InFile::new(file_id.into(), value)) + } +} + +impl InFile { + pub fn upmap_once( + self, + db: &dyn db::ExpandDatabase, + ) -> Option>> { + Some(self.file_id.expansion_info(db)?.map_range_up_once(db, self.value.text_range())) + } + + /// Falls back to the macro call range if the node cannot be mapped up fully. + pub fn original_file_range(self, db: &dyn db::ExpandDatabase) -> FileRange { + match self.file_id.repr() { + HirFileIdRepr::FileId(file_id) => FileRange { file_id, range: self.value.text_range() }, + HirFileIdRepr::MacroFile(mac_file) => { + if let Some(res) = self.original_file_range_opt(db) { + return res; + } + // Fall back to whole macro call. + let loc = db.lookup_intern_macro_call(mac_file.macro_call_id); + loc.kind.original_call_range(db) + } + } + } + + /// Attempts to map the syntax node back up its macro calls. + pub fn original_file_range_opt(self, db: &dyn db::ExpandDatabase) -> Option { + match self.file_id.repr() { + HirFileIdRepr::FileId(file_id) => { + Some(FileRange { file_id, range: self.value.text_range() }) + } + HirFileIdRepr::MacroFile(_) => { + let (range, ctxt) = ascend_range_up_macros(db, self.map(|it| it.text_range())); + + // FIXME: Figure out an API that makes proper use of ctx, this only exists to + // keep pre-token map rewrite behaviour. + if ctxt.is_root() { + Some(range) + } else { + None + } + } + } + } +} + +#[derive(Debug, PartialEq, Eq, Clone, Copy, Hash)] +pub struct InMacroFile { + pub file_id: MacroFile, + pub value: T, +} + +impl From> for InFile { + fn from(macro_file: InMacroFile) -> Self { + InFile { file_id: macro_file.file_id.into(), value: macro_file.value } + } +} + +pub fn ascend_range_up_macros( + db: &dyn db::ExpandDatabase, + range: InFile, +) -> (FileRange, SyntaxContextId) { + match range.file_id.repr() { + HirFileIdRepr::FileId(file_id) => { + (FileRange { file_id, range: range.value }, SyntaxContextId::ROOT) + } + HirFileIdRepr::MacroFile(m) => { + ExpansionInfo::new(db, m).map_token_range_up(db, range.value) + } + } +} + +impl InFile { + pub fn descendants(self) -> impl Iterator> { + self.value.syntax().descendants().filter_map(T::cast).map(move |n| self.with_value(n)) + } + + // FIXME: this should return `Option>` + pub fn original_ast_node(self, db: &dyn db::ExpandDatabase) -> Option> { + // This kind of upmapping can only be achieved in attribute expanded files, + // as we don't have node inputs otherwise and therefore can't find an `N` node in the input + let Some(file_id) = self.file_id.macro_file() else { + return Some(self); + }; + if !self.file_id.is_attr_macro(db) { + return None; + } + + let (FileRange { file_id, range }, ctx) = ExpansionInfo::new(db, file_id) + .map_node_range_up(db, self.value.syntax().text_range())?; + + // FIXME: Figure out an API that makes proper use of ctx, this only exists to + // keep pre-token map rewrite behaviour. + if !ctx.is_root() { + return None; + } + + // FIXME: This heuristic is brittle and with the right macro may select completely unrelated nodes? 
+ let anc = db.parse(file_id).syntax_node().covering_element(range); + let value = anc.ancestors().find_map(N::cast)?; + return Some(InFile::new(file_id.into(), value)); + } + + pub fn syntax(&self) -> InFile<&SyntaxNode> { + self.with_value(self.value.syntax()) + } +} diff --git a/crates/hir-expand/src/hygiene.rs b/crates/hir-expand/src/hygiene.rs index f83a9bf2d657c..66d9f679d8941 100644 --- a/crates/hir-expand/src/hygiene.rs +++ b/crates/hir-expand/src/hygiene.rs @@ -2,7 +2,9 @@ //! //! Specifically, `ast` + `Hygiene` allows you to create a `Name`. Note that, at //! this moment, this is horribly incomplete and handles only `$crate`. -use base_db::span::{MacroCallId, SyntaxContextId}; +use std::iter; + +use base_db::span::{MacroCallId, SpanData, SyntaxContextId}; use crate::db::ExpandDatabase; @@ -48,6 +50,39 @@ pub enum Transparency { Opaque, } +pub fn span_with_def_site_ctxt( + db: &dyn ExpandDatabase, + span: SpanData, + expn_id: MacroCallId, +) -> SpanData { + span_with_ctxt_from_mark(db, span, expn_id, Transparency::Opaque) +} + +pub fn span_with_call_site_ctxt( + db: &dyn ExpandDatabase, + span: SpanData, + expn_id: MacroCallId, +) -> SpanData { + span_with_ctxt_from_mark(db, span, expn_id, Transparency::Transparent) +} + +pub fn span_with_mixed_site_ctxt( + db: &dyn ExpandDatabase, + span: SpanData, + expn_id: MacroCallId, +) -> SpanData { + span_with_ctxt_from_mark(db, span, expn_id, Transparency::SemiTransparent) +} + +fn span_with_ctxt_from_mark( + db: &dyn ExpandDatabase, + span: SpanData, + expn_id: MacroCallId, + transparency: Transparency, +) -> SpanData { + SpanData { ctx: db.apply_mark(SyntaxContextId::ROOT, expn_id, transparency), ..span } +} + pub(super) fn apply_mark( db: &dyn ExpandDatabase, ctxt: SyntaxContextId, @@ -65,7 +100,7 @@ pub(super) fn apply_mark( call_site_ctxt.normalize_to_macro_rules(db) }; - if call_site_ctxt.is_root(db) { + if call_site_ctxt.is_root() { return apply_mark_internal(db, ctxt, Some(call_id), transparency); } @@ -131,7 +166,6 @@ fn apply_mark_internal( }) } pub trait SyntaxContextExt { - fn is_root(self, db: &dyn ExpandDatabase) -> bool; fn normalize_to_macro_rules(self, db: &dyn ExpandDatabase) -> Self; fn normalize_to_macros_2_0(self, db: &dyn ExpandDatabase) -> Self; fn parent_ctxt(self, db: &dyn ExpandDatabase) -> Self; @@ -148,9 +182,6 @@ fn handle_self_ref(p: SyntaxContextId, n: SyntaxContextId) -> SyntaxContextId { } impl SyntaxContextExt for SyntaxContextId { - fn is_root(self, db: &dyn ExpandDatabase) -> bool { - db.lookup_intern_syntax_context(self).outer_expn.is_none() - } fn normalize_to_macro_rules(self, db: &dyn ExpandDatabase) -> Self { handle_self_ref(self, db.lookup_intern_syntax_context(self).opaque_and_semitransparent) } @@ -164,20 +195,20 @@ impl SyntaxContextExt for SyntaxContextId { let data = db.lookup_intern_syntax_context(self); (data.outer_expn, data.outer_transparency) } - fn marks(mut self, db: &dyn ExpandDatabase) -> Vec<(Option, Transparency)> { - let mut marks = Vec::new(); - while self != SyntaxContextId::ROOT { - marks.push(self.outer_mark(db)); - self = self.parent_ctxt(db); - } + fn marks(self, db: &dyn ExpandDatabase) -> Vec<(Option, Transparency)> { + let mut marks = marks_rev(self, db).collect::>(); marks.reverse(); marks } } -// pub(super) fn with_ctxt_from_mark(db: &ExpandDatabase, file_id: HirFileId) { -// self.with_ctxt_from_mark(expn_id, Transparency::Transparent) -// } -// pub(super) fn with_call_site_ctxt(db: &ExpandDatabase, file_id: HirFileId) { -// self.with_ctxt_from_mark(expn_id, 
Transparency::Transparent) -// } +// FIXME: Make this a SyntaxContextExt method once we have RPIT +pub fn marks_rev( + ctxt: SyntaxContextId, + db: &dyn ExpandDatabase, +) -> impl Iterator, Transparency)> + '_ { + iter::successors(Some(ctxt), move |&mark| { + Some(mark.parent_ctxt(db)).filter(|&it| it != SyntaxContextId::ROOT) + }) + .map(|ctx| ctx.outer_mark(db)) +} diff --git a/crates/hir-expand/src/lib.rs b/crates/hir-expand/src/lib.rs index 6864f477ae8d2..d2e5e7c3643f8 100644 --- a/crates/hir-expand/src/lib.rs +++ b/crates/hir-expand/src/lib.rs @@ -18,25 +18,25 @@ pub mod quote; pub mod eager; pub mod mod_path; pub mod attrs; +pub mod span; +pub mod files; // mod fixup; use triomphe::Arc; -use std::{fmt, hash::Hash, iter}; +use std::{fmt, hash::Hash}; use base_db::{ - span::{HirFileIdRepr, SyntaxContextId}, + span::{HirFileIdRepr, SpanData, SyntaxContextId}, CrateId, FileId, FileRange, ProcMacroKind, }; use either::Either; use syntax::{ - algo::{self, skip_trivia_token}, ast::{self, AstNode, HasDocComments}, - AstPtr, Direction, SyntaxNode, SyntaxNodePtr, SyntaxToken, TextSize, + SyntaxNode, SyntaxToken, TextRange, TextSize, }; use crate::{ - ast_id_map::{AstIdNode, ErasedFileAstId, FileAstId}, attrs::AttrId, builtin_attr_macro::BuiltinAttrExpander, builtin_derive_macro::BuiltinDeriveExpander, @@ -44,12 +44,15 @@ use crate::{ db::TokenExpander, mod_path::ModPath, proc_macro::ProcMacroExpander, + span::ExpansionSpanMap, }; +pub use crate::ast_id_map::{AstId, ErasedAstId, ErasedFileAstId}; +pub use crate::files::{InFile, InMacroFile}; + pub use base_db::span::{HirFileId, MacroCallId, MacroFile}; pub use mbe::ValueResult; -pub type SpanMap = ::mbe::TokenMap; pub type DeclarativeMacro = ::mbe::DeclarativeMacro; pub mod tt { @@ -103,7 +106,7 @@ impl fmt::Display for ExpandError { #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct MacroCallLoc { pub def: MacroDefId, - pub(crate) krate: CrateId, + pub krate: CrateId, /// Some if this is a macro call for an eager macro. Note that this is `None` /// for the eager input macro file. eager: Option>, @@ -247,8 +250,7 @@ impl HirFileIdExt for HirFileId { /// Return expansion information if it is a macro-expansion file fn expansion_info(self, db: &dyn db::ExpandDatabase) -> Option { - let macro_file = self.macro_file()?; - ExpansionInfo::new(db, macro_file) + Some(ExpansionInfo::new(db, self.macro_file()?)) } fn as_builtin_derive_attr_node( @@ -340,15 +342,14 @@ impl MacroDefId { } pub fn ast_id(&self) -> Either, AstId> { - let id = match self.kind { + match self.kind { MacroDefKind::ProcMacro(.., id) => return Either::Right(id), MacroDefKind::Declarative(id) | MacroDefKind::BuiltIn(_, id) | MacroDefKind::BuiltInAttr(_, id) | MacroDefKind::BuiltInDerive(_, id) - | MacroDefKind::BuiltInEager(_, id) => id, - }; - Either::Left(id) + | MacroDefKind::BuiltInEager(_, id) => Either::Left(id), + } } pub fn is_proc_macro(&self) -> bool { @@ -390,6 +391,18 @@ impl MacroDefId { } impl MacroCallLoc { + pub fn span(&self, db: &dyn db::ExpandDatabase) -> SpanData { + let ast_id = self.kind.erased_ast_id(); + let file_id = self.kind.file_id(); + let range = db.ast_id_map(file_id).get_erased(ast_id).text_range(); + match file_id.repr() { + HirFileIdRepr::FileId(file_id) => db.real_span_map(file_id).span_for_range(range), + HirFileIdRepr::MacroFile(m) => { + db.parse_macro_expansion(m).value.1.span_for_range(range) + } + } + } + pub fn to_node(&self, db: &dyn db::ExpandDatabase) -> InFile { match self.kind { MacroCallKind::FnLike { ast_id, .. 
} => { @@ -430,17 +443,15 @@ impl MacroCallLoc { match self.kind { MacroCallKind::FnLike { expand_to, .. } => expand_to, MacroCallKind::Derive { .. } => ExpandTo::Items, - MacroCallKind::Attr { .. } if self.def.is_attribute_derive() => ExpandTo::Statements, + MacroCallKind::Attr { .. } if self.def.is_attribute_derive() => ExpandTo::Items, MacroCallKind::Attr { .. } => { - // is this always correct? + // FIXME(stmt_expr_attributes) ExpandTo::Items } } } } -// FIXME: attribute indices do not account for nested `cfg_attr` - impl MacroCallKind { /// Returns the file containing the macro invocation. fn file_id(&self) -> HirFileId { @@ -451,6 +462,14 @@ impl MacroCallKind { } } + fn erased_ast_id(&self) -> ErasedFileAstId { + match *self { + MacroCallKind::FnLike { ast_id: InFile { value, .. }, .. } => value.erase(), + MacroCallKind::Derive { ast_id: InFile { value, .. }, .. } => value.erase(), + MacroCallKind::Attr { ast_id: InFile { value, .. }, .. } => value.erase(), + } + } + /// Returns the original file range that best describes the location of this macro call. /// /// Unlike `MacroCallKind::original_call_range`, this also spans the item of attributes and derives. @@ -518,34 +537,40 @@ impl MacroCallKind { FileRange { range, file_id } } - fn arg(&self, db: &dyn db::ExpandDatabase) -> Option> { + // FIXME: -> InFile it should be impossible for the token tree to be missing at + // this point! + fn arg(&self, db: &dyn db::ExpandDatabase) -> InFile> { match self { - MacroCallKind::FnLike { ast_id, .. } => ast_id - .to_in_file_node(db) - .map(|it| Some(it.token_tree()?.syntax().clone())) - .transpose(), + MacroCallKind::FnLike { ast_id, .. } => { + ast_id.to_in_file_node(db).map(|it| Some(it.token_tree()?.syntax().clone())) + } MacroCallKind::Derive { ast_id, .. } => { - Some(ast_id.to_in_file_node(db).syntax().cloned()) + ast_id.to_in_file_node(db).syntax().cloned().map(Some) } MacroCallKind::Attr { ast_id, .. } => { - Some(ast_id.to_in_file_node(db).syntax().cloned()) + ast_id.to_in_file_node(db).syntax().cloned().map(Some) } } } } /// ExpansionInfo mainly describes how to map text range between src and expanded macro +// FIXME: can be expensive to create, we should check the use sites and maybe replace them with +// simpler function calls if the map is only used once #[derive(Debug, Clone, PartialEq, Eq)] pub struct ExpansionInfo { - expanded: InMacroFile, + pub expanded: InMacroFile, /// The argument TokenTree or item for attributes - arg: InFile, + // FIXME: Can this ever be `None`? + arg: InFile>, /// The `macro_rules!` or attribute input. attr_input_or_mac_def: Option>, macro_def: TokenExpander, macro_arg: Arc, - exp_map: Arc, + exp_map: Arc, + /// [`None`] if the call is in a real file + arg_map: Option>, } impl ExpansionInfo { @@ -554,81 +579,133 @@ impl ExpansionInfo { } pub fn call_node(&self) -> Option> { - Some(self.arg.with_value(self.arg.value.parent()?)) + Some(self.arg.with_value(self.arg.value.as_ref()?.parent()?)) } - /// Map a token down from macro input into the macro expansion. - /// - /// The inner workings of this function differ slightly depending on the type of macro we are dealing with: - /// - declarative: - /// For declarative macros, we need to accommodate for the macro definition site(which acts as a second unchanging input) - /// , as tokens can mapped in and out of it. - /// To do this we shift all ids in the expansion by the maximum id of the definition site giving us an easy - /// way to map all the tokens. 
- /// - attribute: - /// Attributes have two different inputs, the input tokentree in the attribute node and the item - /// the attribute is annotating. Similarly as for declarative macros we need to do a shift here - /// as well. Currently this is done by shifting the attribute input by the maximum id of the item. - /// - function-like and derives: - /// Both of these only have one simple call site input so no special handling is required here. - pub fn map_token_down( - &self, - db: &dyn db::ExpandDatabase, - token: InFile<&SyntaxToken>, + /// Maps the passed in file range down into a macro expansion if it is the input to a macro call. + pub fn map_range_down<'a>( + &'a self, + db: &'a dyn db::ExpandDatabase, + FileRange { file_id, range: absolute_range }: FileRange, // FIXME: use this for range mapping, so that we can resolve inline format args _relative_token_offset: Option, - ) -> Option> + '_> { - assert_eq!(token.file_id, self.arg.file_id); + // FIXME: ret ty should be wrapped in InMacroFile + ) -> Option> + 'a> { + // search for all entries in the span map that have the given span and return the + // corresponding text ranges inside the expansion + // FIXME: Make this proper let span_map = &self.exp_map.span_map; let (start, end) = if span_map .first() - .map_or(false, |(_, span)| span.anchor.file_id == token.file_id) + .map_or(false, |(_, span)| span.anchor.file_id == file_id) { - (0, span_map.partition_point(|a| a.1.anchor.file_id == token.file_id)) + (0, span_map.partition_point(|a| a.1.anchor.file_id == file_id)) } else { - let start = span_map.partition_point(|a| a.1.anchor.file_id != token.file_id); - ( - start, - start + span_map[start..].partition_point(|a| a.1.anchor.file_id == token.file_id), - ) + let start = span_map.partition_point(|a| a.1.anchor.file_id != file_id); + (start, start + span_map[start..].partition_point(|a| a.1.anchor.file_id == file_id)) }; - let token_text_range = token.value.text_range(); - let ast_id_map = db.ast_id_map(token.file_id); let tokens = span_map[start..end] .iter() .filter_map(move |(range, span)| { - let offset = ast_id_map.get_raw(span.anchor.ast_id).text_range().start(); + // we need to resolve the relative ranges here to make sure that we are in fact + // considering differently anchored spans (this might occur with proc-macros) + let offset = db + .ast_id_map(span.anchor.file_id.into()) + .get_erased(span.anchor.ast_id) + .text_range() + .start(); let abs_range = span.range + offset; - token_text_range.eq(&abs_range).then_some(*range) + absolute_range.eq(&abs_range).then_some(*range) }) .flat_map(move |range| self.expanded.value.covering_element(range).into_token()); Some(tokens.map(move |token| InFile::new(self.expanded.file_id.into(), token))) } - /// Map a token up out of the expansion it resides in into the arguments of the macro call of the expansion. - pub fn map_token_up( + /// Maps up the text range out of the expansion hierarchy back into the original file its from. 
+ pub fn map_token_range_up( + &self, + db: &dyn db::ExpandDatabase, + range: TextRange, + ) -> (FileRange, SyntaxContextId) { + debug_assert!(self.expanded.value.text_range().contains_range(range)); + let span = self.exp_map.span_for_range(range); + let anchor_offset = db + .ast_id_map(span.anchor.file_id.into()) + .get_erased(span.anchor.ast_id) + .text_range() + .start(); + (FileRange { file_id: span.anchor.file_id, range: span.range + anchor_offset }, span.ctx) + } + + /// Maps up the text range out of the expansion hierarchy back into the original file its from. + pub fn map_node_range_up( + &self, + db: &dyn db::ExpandDatabase, + range: TextRange, + ) -> Option<(FileRange, SyntaxContextId)> { + debug_assert!(self.expanded.value.text_range().contains_range(range)); + let mut spans = self.exp_map.spans_for_node_range(range); + let SpanData { range, anchor, ctx } = spans.next()?; + let mut start = range.start(); + let mut end = range.end(); + + for span in spans { + if span.anchor != anchor || span.ctx != ctx { + return None; + } + start = start.min(span.range.start()); + end = end.max(span.range.end()); + } + let anchor_offset = + db.ast_id_map(anchor.file_id.into()).get_erased(anchor.ast_id).text_range().start(); + Some(( + FileRange { + file_id: anchor.file_id, + range: TextRange::new(start, end) + anchor_offset, + }, + ctx, + )) + } + + /// Maps up the text range out of the expansion into is macro call. + pub fn map_range_up_once( &self, db: &dyn db::ExpandDatabase, - token: InFile<&SyntaxToken>, - ) -> Option> { - self.exp_map.span_for_range(token.value.text_range()).and_then(|span| { - let anchor = - db.ast_id_map(span.anchor.file_id).get_raw(span.anchor.ast_id).text_range().start(); - InFile::new( - span.anchor.file_id, - db.parse_or_expand(span.anchor.file_id) - .covering_element(span.range + anchor) - .into_token(), - ) - .transpose() - }) + token: TextRange, + ) -> InFile> { + debug_assert!(self.expanded.value.text_range().contains_range(token)); + let span = self.exp_map.span_for_range(token); + match &self.arg_map { + None => { + let file_id = span.anchor.file_id.into(); + let anchor_offset = + db.ast_id_map(file_id).get_erased(span.anchor.ast_id).text_range().start(); + InFile { file_id, value: smallvec::smallvec![span.range + anchor_offset] } + } + Some(arg_map) => { + let arg_range = self + .arg + .value + .as_ref() + .map_or_else(|| TextRange::empty(TextSize::from(0)), |it| it.text_range()); + InFile::new( + self.arg.file_id, + arg_map + .ranges_with_span(span) + .filter(|range| range.intersect(arg_range).is_some()) + .collect(), + ) + } + } } - fn new(db: &dyn db::ExpandDatabase, macro_file: MacroFile) -> Option { + pub fn new(db: &dyn db::ExpandDatabase, macro_file: MacroFile) -> ExpansionInfo { let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id); - let arg_tt = loc.kind.arg(db)?; + let arg_tt = loc.kind.arg(db); + let arg_map = + arg_tt.file_id.macro_file().map(|file| db.parse_macro_expansion(file).value.1); let macro_def = db.macro_expander(loc.def); let (parse, exp_map) = db.parse_macro_expansion(macro_file).value; @@ -662,331 +739,18 @@ impl ExpansionInfo { _ => None, }); - Some(ExpansionInfo { + ExpansionInfo { expanded, arg: arg_tt, attr_input_or_mac_def, macro_arg, macro_def, exp_map, - }) - } -} - -/// `AstId` points to an AST node in any file. -/// -/// It is stable across reparses, and can be used as salsa key/value. 
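[Editor's note] The `AstId` items removed below move out of `lib.rs` (they are re-exported from `ast_id_map` via the `pub use` earlier in this file), but the resolution pattern itself is unchanged across the patch, e.g. the `ast_id.to_ptr(db).to_node(&root)` calls in `db.rs`. A hedged sketch of that pattern; `resolve_ast_id` is an invented name:

    // Hypothetical helper mirroring the moved `AstId::to_node`: look the
    // stable id up in the file's AstIdMap to get a syntax pointer, then
    // re-resolve that pointer in a fresh parse of the same file.
    fn resolve_ast_id<N: AstIdNode>(db: &dyn ExpandDatabase, id: AstId<N>) -> N {
        id.to_ptr(db).to_node(&db.parse_or_expand(id.file_id))
    }
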
-pub type AstId = InFile>; - -impl AstId { - pub fn to_node(&self, db: &dyn db::ExpandDatabase) -> N { - self.to_ptr(db).to_node(&db.parse_or_expand(self.file_id)) - } - pub fn to_in_file_node(&self, db: &dyn db::ExpandDatabase) -> InFile { - InFile::new(self.file_id, self.to_ptr(db).to_node(&db.parse_or_expand(self.file_id))) - } - pub fn to_ptr(&self, db: &dyn db::ExpandDatabase) -> AstPtr { - db.ast_id_map(self.file_id).get(self.value) - } -} - -pub type ErasedAstId = InFile; - -impl ErasedAstId { - pub fn to_ptr(&self, db: &dyn db::ExpandDatabase) -> SyntaxNodePtr { - db.ast_id_map(self.file_id).get_raw(self.value) - } -} - -/// `InFile` stores a value of `T` inside a particular file/syntax tree. -/// -/// Typical usages are: -/// -/// * `InFile` -- syntax node in a file -/// * `InFile` -- ast node in a file -/// * `InFile` -- offset in a file -#[derive(Debug, PartialEq, Eq, Clone, Copy, Hash)] -pub struct InFile { - pub file_id: HirFileId, - pub value: T, -} - -impl InFile { - pub fn new(file_id: HirFileId, value: T) -> InFile { - InFile { file_id, value } - } - - pub fn with_value(&self, value: U) -> InFile { - InFile::new(self.file_id, value) - } - - pub fn map U, U>(self, f: F) -> InFile { - InFile::new(self.file_id, f(self.value)) - } - - pub fn as_ref(&self) -> InFile<&T> { - self.with_value(&self.value) - } - - pub fn file_syntax(&self, db: &dyn db::ExpandDatabase) -> SyntaxNode { - db.parse_or_expand(self.file_id) - } -} - -impl InFile<&T> { - pub fn cloned(&self) -> InFile { - self.with_value(self.value.clone()) - } -} - -impl InFile> { - pub fn transpose(self) -> Option> { - let value = self.value?; - Some(InFile::new(self.file_id, value)) - } -} - -impl InFile> { - pub fn transpose(self) -> Either, InFile> { - match self.value { - Either::Left(l) => Either::Left(InFile::new(self.file_id, l)), - Either::Right(r) => Either::Right(InFile::new(self.file_id, r)), + arg_map, } } } -impl InFile<&SyntaxNode> { - pub fn ancestors_with_macros( - self, - db: &dyn db::ExpandDatabase, - ) -> impl Iterator> + Clone + '_ { - iter::successors(Some(self.cloned()), move |node| match node.value.parent() { - Some(parent) => Some(node.with_value(parent)), - None => node.file_id.call_node(db), - }) - } - - /// Skips the attributed item that caused the macro invocation we are climbing up - pub fn ancestors_with_macros_skip_attr_item( - self, - db: &dyn db::ExpandDatabase, - ) -> impl Iterator> + '_ { - let succ = move |node: &InFile| match node.value.parent() { - Some(parent) => Some(node.with_value(parent)), - None => { - let parent_node = node.file_id.call_node(db)?; - if node.file_id.is_attr_macro(db) { - // macro call was an attributed item, skip it - // FIXME: does this fail if this is a direct expansion of another macro? - parent_node.map(|node| node.parent()).transpose() - } else { - Some(parent_node) - } - } - }; - iter::successors(succ(&self.cloned()), succ) - } - - /// Falls back to the macro call range if the node cannot be mapped up fully. - /// - /// For attributes and derives, this will point back to the attribute only. - /// For the entire item use [`InFile::original_file_range_full`]. - pub fn original_file_range(self, db: &dyn db::ExpandDatabase) -> FileRange { - match self.file_id.repr() { - HirFileIdRepr::FileId(file_id) => FileRange { file_id, range: self.value.text_range() }, - HirFileIdRepr::MacroFile(mac_file) => { - if let Some(res) = self.original_file_range_opt(db) { - return res; - } - // Fall back to whole macro call. 
- let loc = db.lookup_intern_macro_call(mac_file.macro_call_id); - loc.kind.original_call_range(db) - } - } - } - - /// Falls back to the macro call range if the node cannot be mapped up fully. - pub fn original_file_range_full(self, db: &dyn db::ExpandDatabase) -> FileRange { - match self.file_id.repr() { - HirFileIdRepr::FileId(file_id) => FileRange { file_id, range: self.value.text_range() }, - HirFileIdRepr::MacroFile(mac_file) => { - if let Some(res) = self.original_file_range_opt(db) { - return res; - } - // Fall back to whole macro call. - let loc = db.lookup_intern_macro_call(mac_file.macro_call_id); - loc.kind.original_call_range_with_body(db) - } - } - } - - /// Attempts to map the syntax node back up its macro calls. - pub fn original_file_range_opt(self, db: &dyn db::ExpandDatabase) -> Option { - match ascend_node_border_tokens(db, self) { - Some(InFile { file_id, value: (first, last) }) => { - let original_file = file_id.original_file(db); - let range = first.text_range().cover(last.text_range()); - if file_id != original_file.into() { - tracing::error!("Failed mapping up more for {:?}", range); - return None; - } - Some(FileRange { file_id: original_file, range }) - } - _ if !self.file_id.is_macro() => Some(FileRange { - file_id: self.file_id.original_file(db), - range: self.value.text_range(), - }), - _ => None, - } - } - - pub fn original_syntax_node(self, db: &dyn db::ExpandDatabase) -> Option> { - // This kind of upmapping can only be achieved in attribute expanded files, - // as we don't have node inputs otherwise and therefore can't find an `N` node in the input - if !self.file_id.is_macro() { - return Some(self.map(Clone::clone)); - } else if !self.file_id.is_attr_macro(db) { - return None; - } - - if let Some(InFile { file_id, value: (first, last) }) = ascend_node_border_tokens(db, self) - { - if file_id.is_macro() { - let range = first.text_range().cover(last.text_range()); - tracing::error!("Failed mapping out of macro file for {:?}", range); - return None; - } - // FIXME: This heuristic is brittle and with the right macro may select completely unrelated nodes - let anc = algo::least_common_ancestor(&first.parent()?, &last.parent()?)?; - let kind = self.value.kind(); - let value = anc.ancestors().find(|it| it.kind() == kind)?; - return Some(InFile::new(file_id, value)); - } - None - } -} - -impl InFile { - pub fn upmap(self, db: &dyn db::ExpandDatabase) -> Option> { - let expansion = self.file_id.expansion_info(db)?; - expansion.map_token_up(db, self.as_ref()) - } - - /// Falls back to the macro call range if the node cannot be mapped up fully. - pub fn original_file_range(self, db: &dyn db::ExpandDatabase) -> FileRange { - match self.file_id.repr() { - HirFileIdRepr::FileId(file_id) => FileRange { file_id, range: self.value.text_range() }, - HirFileIdRepr::MacroFile(mac_file) => { - if let Some(res) = self.original_file_range_opt(db) { - return res; - } - // Fall back to whole macro call. - let loc = db.lookup_intern_macro_call(mac_file.macro_call_id); - loc.kind.original_call_range(db) - } - } - } - - /// Attempts to map the syntax node back up its macro calls. 
- pub fn original_file_range_opt(self, db: &dyn db::ExpandDatabase) -> Option { - match self.file_id.repr() { - HirFileIdRepr::FileId(file_id) => { - Some(FileRange { file_id, range: self.value.text_range() }) - } - HirFileIdRepr::MacroFile(_) => { - let expansion = self.file_id.expansion_info(db)?; - let InFile { file_id, value } = ascend_call_token(db, &expansion, self)?; - let original_file = file_id.original_file(db); - if file_id != original_file.into() { - return None; - } - Some(FileRange { file_id: original_file, range: value.text_range() }) - } - } - } -} - -#[derive(Debug, PartialEq, Eq, Clone, Copy, Hash)] -pub struct InMacroFile { - pub file_id: MacroFile, - pub value: T, -} - -impl From> for InFile { - fn from(macro_file: InMacroFile) -> Self { - InFile { file_id: macro_file.file_id.into(), value: macro_file.value } - } -} - -// FIXME: Get rid of this -fn ascend_node_border_tokens( - db: &dyn db::ExpandDatabase, - InFile { file_id, value: node }: InFile<&SyntaxNode>, -) -> Option> { - let expansion = file_id.expansion_info(db)?; - - let first_token = |node: &SyntaxNode| skip_trivia_token(node.first_token()?, Direction::Next); - let last_token = |node: &SyntaxNode| skip_trivia_token(node.last_token()?, Direction::Prev); - - // FIXME: Once the token map rewrite is done, this shouldnt need to rely on syntax nodes and tokens anymore - let first = first_token(node)?; - let last = last_token(node)?; - let first = ascend_call_token(db, &expansion, InFile::new(file_id, first))?; - let last = ascend_call_token(db, &expansion, InFile::new(file_id, last))?; - (first.file_id == last.file_id).then(|| InFile::new(first.file_id, (first.value, last.value))) -} - -fn ascend_call_token( - db: &dyn db::ExpandDatabase, - expansion: &ExpansionInfo, - token: InFile, -) -> Option> { - let mut mapping = expansion.map_token_up(db, token.as_ref())?; - - loop { - match mapping.file_id.expansion_info(db) { - Some(info) => mapping = info.map_token_up(db, mapping.as_ref())?, - None => return Some(mapping), - } - } -} - -impl InFile { - pub fn descendants(self) -> impl Iterator> { - self.value.syntax().descendants().filter_map(T::cast).map(move |n| self.with_value(n)) - } - - // FIXME: this should return `Option>` - pub fn original_ast_node(self, db: &dyn db::ExpandDatabase) -> Option> { - // This kind of upmapping can only be achieved in attribute expanded files, - // as we don't have node inputs otherwise and therefore can't find an `N` node in the input - if !self.file_id.is_macro() { - return Some(self); - } else if !self.file_id.is_attr_macro(db) { - return None; - } - - if let Some(InFile { file_id, value: (first, last) }) = - ascend_node_border_tokens(db, self.syntax()) - { - if file_id.is_macro() { - let range = first.text_range().cover(last.text_range()); - tracing::error!("Failed mapping out of macro file for {:?}", range); - return None; - } - // FIXME: This heuristic is brittle and with the right macro may select completely unrelated nodes - let anc = algo::least_common_ancestor(&first.parent()?, &last.parent()?)?; - let value = anc.ancestors().find_map(N::cast)?; - return Some(InFile::new(file_id, value)); - } - None - } - - pub fn syntax(&self) -> InFile<&SyntaxNode> { - self.with_value(self.value.syntax()) - } -} - /// In Rust, macros expand token trees to token trees. When we want to turn a /// token tree into an AST node, we need to figure out what kind of AST node we /// want: something like `foo` can be a type, an expression, or a pattern. 
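[Editor's note] The `resolve_crate_root` rewrite in the `mod_path.rs` hunk below leans on the new `marks_rev` iterator from `hygiene.rs`, which yields a context's hygiene marks innermost-first. A hedged companion sketch of the iterator's shape; the helper name `innermost_opaque_mark` is invented for illustration:

    // Hypothetical helper: walk hygiene marks from the innermost outwards
    // (the order `marks_rev` yields) and return the macro call of the first
    // opaque mark, if any.
    fn innermost_opaque_mark(
        db: &dyn ExpandDatabase,
        ctxt: SyntaxContextId,
    ) -> Option<MacroCallId> {
        marks_rev(ctxt, db)
            .find(|&(_, transparency)| transparency == Transparency::Opaque)
            .and_then(|(call, _)| call)
    }
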
@@ -1051,9 +815,4 @@ impl ExpandTo { } } -#[derive(Debug)] -pub struct UnresolvedMacro { - pub path: ModPath, -} - intern::impl_internable!(ModPath, attrs::AttrInput); diff --git a/crates/hir-expand/src/mod_path.rs b/crates/hir-expand/src/mod_path.rs index 9396b08b2800f..a518f7eb6a9f1 100644 --- a/crates/hir-expand/src/mod_path.rs +++ b/crates/hir-expand/src/mod_path.rs @@ -7,9 +7,9 @@ use std::{ use crate::{ db::ExpandDatabase, - hygiene::{SyntaxContextExt, Transparency}, + hygiene::{marks_rev, SyntaxContextExt, Transparency}, name::{known, AsName, Name}, - SpanMap, + span::SpanMapRef, }; use base_db::{span::SyntaxContextId, CrateId}; use smallvec::SmallVec; @@ -47,7 +47,7 @@ impl ModPath { pub fn from_src( db: &dyn ExpandDatabase, path: ast::Path, - hygiene: &SpanMap, + hygiene: SpanMapRef<'_>, ) -> Option { convert_path(db, None, path, hygiene) } @@ -194,7 +194,7 @@ fn convert_path( db: &dyn ExpandDatabase, prefix: Option, path: ast::Path, - hygiene: &SpanMap, + hygiene: SpanMapRef<'_>, ) -> Option { let prefix = match path.qualifier() { Some(qual) => Some(convert_path(db, prefix, qual, hygiene)?), @@ -208,14 +208,14 @@ fn convert_path( if prefix.is_some() { return None; } - resolve_crate_root( - db, - hygiene - .span_for_range(name_ref.syntax().text_range()) - .map_or(SyntaxContextId::ROOT, |s| s.ctx), + ModPath::from_kind( + resolve_crate_root( + db, + hygiene.span_for_range(name_ref.syntax().text_range()).ctx, + ) + .map(PathKind::DollarCrate) + .unwrap_or(PathKind::Crate), ) - .map(PathKind::DollarCrate) - .map(ModPath::from_kind)? } else { let mut res = prefix.unwrap_or_else(|| { ModPath::from_kind( @@ -265,13 +265,12 @@ fn convert_path( // We follow what it did anyway :) if mod_path.segments.len() == 1 && mod_path.kind == PathKind::Plain { if let Some(_macro_call) = path.syntax().parent().and_then(ast::MacroCall::cast) { - let syn_ctx = hygiene - .span_for_range(segment.syntax().text_range()) - .map_or(SyntaxContextId::ROOT, |s| s.ctx); + let syn_ctx = hygiene.span_for_range(segment.syntax().text_range()).ctx; if let Some(macro_call_id) = db.lookup_intern_syntax_context(syn_ctx).outer_expn { if db.lookup_intern_macro_call(macro_call_id).def.local_inner { - if let Some(crate_root) = resolve_crate_root(db, syn_ctx) { - mod_path.kind = PathKind::DollarCrate(crate_root); + mod_path.kind = match resolve_crate_root(db, syn_ctx) { + Some(crate_root) => PathKind::DollarCrate(crate_root), + None => PathKind::Crate, } } } @@ -289,30 +288,19 @@ pub fn resolve_crate_root(db: &dyn ExpandDatabase, mut ctxt: SyntaxContextId) -> // definitions actually produced by `macro` and `macro` definitions produced by // `macro_rules!`, but at least such configurations are not stable yet. ctxt = ctxt.normalize_to_macro_rules(db); - let mut iter = ctxt.marks(db).into_iter().rev().peekable(); + let mut iter = marks_rev(ctxt, db).peekable(); let mut result_mark = None; // Find the last opaque mark from the end if it exists. - while let Some(&(mark, transparency)) = iter.peek() { - if transparency == Transparency::Opaque { - result_mark = Some(mark); - iter.next(); - } else { - break; - } + while let Some(&(mark, Transparency::Opaque)) = iter.peek() { + result_mark = Some(mark); + iter.next(); } // Then find the last semi-transparent mark from the end if it exists. 
- for (mark, transparency) in iter { - if transparency == Transparency::SemiTransparent { - result_mark = Some(mark); - } else { - break; - } + while let Some((mark, Transparency::SemiTransparent)) = iter.next() { + result_mark = Some(mark); } - match result_mark { - Some(Some(call)) => Some(db.lookup_intern_macro_call(call.into()).def.krate), - Some(None) | None => None, - } + result_mark.flatten().map(|call| db.lookup_intern_macro_call(call.into()).def.krate) } pub use crate::name as __name; diff --git a/crates/hir-expand/src/span.rs b/crates/hir-expand/src/span.rs new file mode 100644 index 0000000000000..c16d761c17195 --- /dev/null +++ b/crates/hir-expand/src/span.rs @@ -0,0 +1,109 @@ +use base_db::{ + span::{ErasedFileAstId, SpanAnchor, SpanData, SyntaxContextId, ROOT_ERASED_FILE_AST_ID}, + FileId, +}; +use mbe::TokenMap; +use syntax::{ast::HasModuleItem, AstNode, TextRange, TextSize}; +use triomphe::Arc; + +use crate::db::ExpandDatabase; + +pub type ExpansionSpanMap = TokenMap; + +/// Spanmap for a macro file or a real file +#[derive(Clone, Debug)] +pub enum SpanMap { + /// Spanmap for a macro file + ExpansionSpanMap(Arc), + /// Spanmap for a real file + RealSpanMap(Arc), +} + +#[derive(Copy, Clone)] +pub enum SpanMapRef<'a> { + /// Spanmap for a macro file + ExpansionSpanMap(&'a ExpansionSpanMap), + /// Spanmap for a real file + RealSpanMap(&'a RealSpanMap), +} + +impl mbe::SpanMapper for SpanMap { + fn span_for(&self, range: TextRange) -> SpanData { + self.span_for_range(range) + } +} +impl mbe::SpanMapper for SpanMapRef<'_> { + fn span_for(&self, range: TextRange) -> SpanData { + self.span_for_range(range) + } +} +impl mbe::SpanMapper for RealSpanMap { + fn span_for(&self, range: TextRange) -> SpanData { + self.span_for_range(range) + } +} + +impl SpanMap { + pub fn span_for_range(&self, range: TextRange) -> SpanData { + match self { + Self::ExpansionSpanMap(span_map) => span_map.span_for_range(range), + Self::RealSpanMap(span_map) => span_map.span_for_range(range), + } + } + + pub fn as_ref(&self) -> SpanMapRef<'_> { + match self { + Self::ExpansionSpanMap(span_map) => SpanMapRef::ExpansionSpanMap(span_map), + Self::RealSpanMap(span_map) => SpanMapRef::RealSpanMap(span_map), + } + } +} + +impl SpanMapRef<'_> { + pub fn span_for_range(self, range: TextRange) -> SpanData { + match self { + Self::ExpansionSpanMap(span_map) => span_map.span_for_range(range), + Self::RealSpanMap(span_map) => span_map.span_for_range(range), + } + } +} + +#[derive(PartialEq, Eq, Hash, Debug)] +pub struct RealSpanMap { + file_id: FileId, + /// Invariant: Sorted vec over TextSize + // FIXME: SortedVec<(TextSize, ErasedFileAstId)>? 
+ pairs: Box<[(TextSize, ErasedFileAstId)]>, +} + +impl RealSpanMap { + pub fn empty(file_id: FileId) -> Self { + RealSpanMap { file_id, pairs: Box::from([(TextSize::new(0), ROOT_ERASED_FILE_AST_ID)]) } + } + + pub fn from_file(db: &dyn ExpandDatabase, file_id: FileId) -> Self { + let mut pairs = vec![(TextSize::new(0), ROOT_ERASED_FILE_AST_ID)]; + let ast_id_map = db.ast_id_map(file_id.into()); + pairs.extend( + db.parse(file_id) + .tree() + .items() + .map(|item| (item.syntax().text_range().start(), ast_id_map.ast_id(&item).erase())), + ); + RealSpanMap { file_id, pairs: pairs.into_boxed_slice() } + } + + pub fn span_for_range(&self, range: TextRange) -> SpanData { + let start = range.start(); + let idx = self + .pairs + .binary_search_by(|&(it, _)| it.cmp(&start).then(std::cmp::Ordering::Less)) + .unwrap_err(); + let (offset, ast_id) = self.pairs[idx - 1]; + SpanData { + range: range - offset, + anchor: SpanAnchor { file_id: self.file_id, ast_id }, + ctx: SyntaxContextId::ROOT, + } + } +} diff --git a/crates/hir/src/attrs.rs b/crates/hir/src/attrs.rs index bb02620208bf1..42d1515e59312 100644 --- a/crates/hir/src/attrs.rs +++ b/crates/hir/src/attrs.rs @@ -1,5 +1,6 @@ //! Attributes & documentation for hir types. +use base_db::FileId; use hir_def::{ attr::AttrsWithOwner, item_scope::ItemInNs, @@ -8,7 +9,10 @@ use hir_def::{ resolver::{HasResolver, Resolver, TypeNs}, AssocItemId, AttrDefId, ModuleDefId, }; -use hir_expand::name::Name; +use hir_expand::{ + name::Name, + span::{RealSpanMap, SpanMapRef}, +}; use hir_ty::db::HirDatabase; use syntax::{ast, AstNode}; @@ -234,7 +238,11 @@ fn modpath_from_str(db: &dyn HirDatabase, link: &str) -> Option { if ast_path.syntax().text() != link { return None; } - ModPath::from_src(db.upcast(), ast_path, &Default::default()) + ModPath::from_src( + db.upcast(), + ast_path, + SpanMapRef::RealSpanMap(&RealSpanMap::empty(FileId(0))), + ) }; let full = try_get_modpath(link); diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs index b224b4b3a97c9..443c19af82d3a 100644 --- a/crates/hir/src/lib.rs +++ b/crates/hir/src/lib.rs @@ -59,7 +59,7 @@ use hir_def::{ Lookup, MacroExpander, MacroId, ModuleId, StaticId, StructId, TraitAliasId, TraitId, TypeAliasId, TypeOrConstParamId, TypeParamId, UnionId, }; -use hir_expand::{name::name, MacroCallKind}; +use hir_expand::{name::name, InMacroFile, MacroCallKind}; use hir_ty::{ all_super_traits, autoderef, check_orphan_rules, consteval::{try_const_usize, unknown_const_as_generic, ConstEvalError, ConstExt}, @@ -3483,11 +3483,41 @@ impl Impl { self.id.lookup(db.upcast()).container.into() } - pub fn as_builtin_derive(self, db: &dyn HirDatabase) -> Option> { + pub fn as_builtin_derive_attr(self, db: &dyn HirDatabase) -> Option> { let src = self.source(db)?; src.file_id.as_builtin_derive_attr_node(db.upcast()) } + pub fn as_builtin_derive_path(self, db: &dyn HirDatabase) -> Option> { + let src = self.source(db)?; + + let macro_file = src.file_id.macro_file()?; + let loc = db.lookup_intern_macro_call(macro_file.macro_call_id); + let (derive_attr, derive_index) = match loc.kind { + MacroCallKind::Derive { ast_id, derive_attr_index, derive_index } => { + let module_id = self.id.lookup(db.upcast()).container; + ( + db.crate_def_map(module_id.krate())[module_id.local_id] + .scope + .derive_macro_invoc(ast_id, derive_attr_index)?, + derive_index, + ) + } + _ => return None, + }; + let file_id = MacroFile { macro_call_id: derive_attr }; + let path = db + .parse_macro_expansion(file_id) + .value + .0 + .syntax_node() + .children() 
+            .nth(derive_index as usize)
+            .and_then(<ast::Attr as AstNode>::cast)
+            .and_then(|it| it.path())?;
+        Some(InMacroFile { file_id, value: path })
+    }
+
     pub fn check_orphan_rules(self, db: &dyn HirDatabase) -> bool {
         check_orphan_rules(db, self.id)
     }
diff --git a/crates/hir/src/semantics.rs b/crates/hir/src/semantics.rs
index 825d68cdbaed6..137cffa7af150 100644
--- a/crates/hir/src/semantics.rs
+++ b/crates/hir/src/semantics.rs
@@ -19,6 +19,7 @@ use hir_expand::{db::ExpandDatabase, name::AsName, ExpansionInfo, HirFileIdExt,
 use itertools::Itertools;
 use rustc_hash::{FxHashMap, FxHashSet};
 use smallvec::{smallvec, SmallVec};
+use stdx::TupleExt;
 use syntax::{
     algo::skip_trivia_token,
     ast::{self, HasAttrs as _, HasGenericParams, HasLoopBody},
@@ -529,11 +530,11 @@ impl<'db> SemanticsImpl<'db> {
         token: SyntaxToken,
         // FIXME: We might want this to be Option<TextSize> to be able to opt out of subrange
         // mapping, specifically for node downmapping
-        offset: TextSize,
+        _offset: TextSize,
         f: &mut dyn FnMut(InFile<SyntaxToken>) -> bool,
     ) {
+        // FIXME: Clean this up
         let _p = profile::span("descend_into_macros");
-        let relative_token_offset = token.text_range().start().checked_sub(offset);
         let parent = match token.parent() {
             Some(it) => it,
             None => return,
@@ -543,13 +544,35 @@
             None => return,
         };
         let def_map = sa.resolver.def_map();
+        let absolute_range = match sa.file_id.repr() {
+            base_db::span::HirFileIdRepr::FileId(file_id) => {
+                FileRange { file_id, range: token.text_range() }
+            }
+            base_db::span::HirFileIdRepr::MacroFile(m) => {
+                let span =
+                    self.db.parse_macro_expansion(m).value.1.span_for_range(token.text_range());
+                let range = span.range
+                    + self
+                        .db
+                        .ast_id_map(span.anchor.file_id.into())
+                        .get_erased(span.anchor.ast_id)
+                        .text_range()
+                        .start();
+                FileRange { file_id: span.anchor.file_id, range }
+            }
+        };
+
+        // Fetch the span information of the token in the real file, then use that to look
+        // through expansions of the calls the token is in, and afterwards recurse with the same span.
+        // What about cases where spans change, e.g. by being joined, so that we no longer find the
+        // exact span?
         let mut stack: SmallVec<[_; 4]> = smallvec![InFile::new(sa.file_id, token)];
         let mut cache = self.expansion_info_cache.borrow_mut();
         let mut mcache = self.macro_call_cache.borrow_mut();
         let mut process_expansion_for_token =
-            |stack: &mut SmallVec<_>, macro_file, token: InFile<&_>| {
+            |stack: &mut SmallVec<_>, macro_file, _token: InFile<&_>| {
                 let expansion_info = cache
                     .entry(macro_file)
                     .or_insert_with(|| macro_file.expansion_info(self.db.upcast()))
@@ -560,11 +583,8 @@
                     self.cache(value, file_id);
                 }

-                let mapped_tokens = expansion_info.map_token_down(
-                    self.db.upcast(),
-                    token,
-                    relative_token_offset,
-                )?;
+                let mapped_tokens =
+                    expansion_info.map_range_down(self.db.upcast(), absolute_range, None)?;

                 let len = stack.len();
                 // requeue the tokens we got from mapping our current token down
@@ -728,6 +748,8 @@ impl<'db> SemanticsImpl<'db> {
     pub fn original_range_opt(&self, node: &SyntaxNode) -> Option<FileRange> {
         let node = self.find_file(node);
         node.original_file_range_opt(self.db.upcast())
+            .filter(|(_, ctx)| ctx.is_root())
+            .map(TupleExt::head)
     }

     /// Attempts to map the node out of macro expanded files.
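
Taken together, the span map from earlier in this patch and the `absolute_range` computation above are inverses of each other: `RealSpanMap::span_for_range` anchors an absolute offset to the nearest enclosing item via a partition-point binary search, while `descend_into_macros` adds the anchor's start offset back onto a span's relative range to recover an absolute `FileRange`. Below is a minimal self-contained sketch of both directions, with plain `u32` offsets and a `u32` anchor id standing in for `TextSize` and `ErasedFileAstId`; it illustrates the technique only and is not the actual rust-analyzer API.

    use std::cmp::Ordering;
    use std::ops::Range;

    /// Stand-in for a span: a range relative to an anchoring item, plus that item's id.
    #[derive(Debug, PartialEq)]
    struct Span {
        range: Range<u32>,
        anchor: u32, // stands in for `ErasedFileAstId`
    }

    /// Sorted `(item start offset, anchor id)` pairs; index 0 is the file root at offset 0.
    struct SpanMap {
        pairs: Vec<(u32, u32)>,
    }

    impl SpanMap {
        /// Anchor an absolute range to the closest item starting at or before it.
        fn span_for_range(&self, range: Range<u32>) -> Span {
            // The comparator never returns `Equal`, so `binary_search_by` always
            // returns `Err(partition_point)`: the first pair past `range.start`.
            let idx = self
                .pairs
                .binary_search_by(|&(offset, _)| offset.cmp(&range.start).then(Ordering::Less))
                .unwrap_err();
            let (offset, anchor) = self.pairs[idx - 1];
            Span { range: range.start - offset..range.end - offset, anchor }
        }

        /// Inverse direction: re-add the anchor's start offset, as `descend_into_macros`
        /// does through the `AstIdMap`, to recover the absolute range.
        fn absolute_range(&self, span: &Span) -> Range<u32> {
            let (offset, _) =
                self.pairs.iter().copied().find(|&(_, a)| a == span.anchor).unwrap();
            span.range.start + offset..span.range.end + offset
        }
    }

    fn main() {
        // File root (anchor 0) at offset 0; one item (anchor 1) starting at offset 100.
        let map = SpanMap { pairs: vec![(0, 0), (100, 1)] };
        let span = map.span_for_range(120..127);
        assert_eq!(span, Span { range: 20..27, anchor: 1 });
        assert_eq!(map.absolute_range(&span), 120..127);
    }

The comparator trick is what makes the lookup infallible: by never returning `Equal`, `binary_search_by` is guaranteed to return `Err` with the partition point, so `idx - 1` is always the closest anchor at or before the queried offset, and the entry at offset 0 guarantees `idx >= 1`.
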
diff --git a/crates/hir/src/symbols.rs b/crates/hir/src/symbols.rs index ca7874c3683c5..6ea926cc22d8c 100644 --- a/crates/hir/src/symbols.rs +++ b/crates/hir/src/symbols.rs @@ -7,7 +7,7 @@ use hir_def::{ AdtId, AssocItemId, DefWithBodyId, HasModule, ImplId, Lookup, MacroId, ModuleDefId, ModuleId, TraitId, }; -use hir_expand::{HirFileId, InFile}; +use hir_expand::{files::ascend_range_up_macros, HirFileId, InFile}; use hir_ty::db::HirDatabase; use syntax::{ast::HasName, AstNode, SmolStr, SyntaxNode, SyntaxNodePtr}; @@ -50,13 +50,9 @@ impl DeclarationLocation { node.as_ref().original_file_range(db.upcast()) } - pub fn original_name_range(&self, db: &dyn HirDatabase) -> Option { - if let Some(file_id) = self.hir_file_id.file_id() { - // fast path to prevent parsing - return Some(FileRange { file_id, range: self.name_ptr.text_range() }); - } - let node = resolve_node(db, self.hir_file_id, &self.name_ptr); - node.as_ref().original_file_range_opt(db.upcast()) + pub fn original_name_range(&self, db: &dyn HirDatabase) -> FileRange { + let mapping = InFile::new(self.hir_file_id, self.name_ptr.text_range()); + ascend_range_up_macros(db.upcast(), mapping).0 } } diff --git a/crates/ide-completion/src/tests/proc_macros.rs b/crates/ide-completion/src/tests/proc_macros.rs index 2d6234e310c60..e55d1f315fedf 100644 --- a/crates/ide-completion/src/tests/proc_macros.rs +++ b/crates/ide-completion/src/tests/proc_macros.rs @@ -9,6 +9,7 @@ fn check(ra_fixture: &str, expect: Expect) { } #[test] +#[ignore] // todo fn complete_dot_in_attr() { check( r#" @@ -40,6 +41,7 @@ fn main() { } #[test] +#[ignore] // TODO fn complete_dot_in_attr2() { check( r#" diff --git a/crates/ide-db/src/rename.rs b/crates/ide-db/src/rename.rs index cc9038fdfa2fe..9f101468a3500 100644 --- a/crates/ide-db/src/rename.rs +++ b/crates/ide-db/src/rename.rs @@ -22,10 +22,10 @@ //! Our current behavior is ¯\_(ツ)_/¯. use std::fmt; -use base_db::{AnchoredPathBuf, FileId, FileRange}; +use base_db::{span::SyntaxContextId, AnchoredPathBuf, FileId, FileRange}; use either::Either; use hir::{FieldSource, HasSource, HirFileIdExt, InFile, ModuleSource, Semantics}; -use stdx::never; +use stdx::{never, TupleExt}; use syntax::{ ast::{self, HasName}, AstNode, SyntaxKind, TextRange, T, @@ -103,6 +103,7 @@ impl Definition { /// renamed and extern crate names will report its range, though a rename will introduce /// an alias instead. 
pub fn range_for_rename(self, sema: &Semantics<'_, RootDatabase>) -> Option { + let syn_ctx_is_root = |(range, ctx): (_, SyntaxContextId)| ctx.is_root().then(|| range); let res = match self { Definition::Macro(mac) => { let src = mac.source(sema.db)?; @@ -110,14 +111,18 @@ impl Definition { Either::Left(it) => it.name()?, Either::Right(it) => it.name()?, }; - src.with_value(name.syntax()).original_file_range_opt(sema.db) + src.with_value(name.syntax()) + .original_file_range_opt(sema.db) + .and_then(syn_ctx_is_root) } Definition::Field(field) => { let src = field.source(sema.db)?; match &src.value { FieldSource::Named(record_field) => { let name = record_field.name()?; - src.with_value(name.syntax()).original_file_range_opt(sema.db) + src.with_value(name.syntax()) + .original_file_range_opt(sema.db) + .and_then(syn_ctx_is_root) } FieldSource::Pos(_) => None, } @@ -125,25 +130,31 @@ impl Definition { Definition::Module(module) => { let src = module.declaration_source(sema.db)?; let name = src.value.name()?; - src.with_value(name.syntax()).original_file_range_opt(sema.db) + src.with_value(name.syntax()) + .original_file_range_opt(sema.db) + .and_then(syn_ctx_is_root) } - Definition::Function(it) => name_range(it, sema), + Definition::Function(it) => name_range(it, sema).and_then(syn_ctx_is_root), Definition::Adt(adt) => match adt { - hir::Adt::Struct(it) => name_range(it, sema), - hir::Adt::Union(it) => name_range(it, sema), - hir::Adt::Enum(it) => name_range(it, sema), + hir::Adt::Struct(it) => name_range(it, sema).and_then(syn_ctx_is_root), + hir::Adt::Union(it) => name_range(it, sema).and_then(syn_ctx_is_root), + hir::Adt::Enum(it) => name_range(it, sema).and_then(syn_ctx_is_root), }, - Definition::Variant(it) => name_range(it, sema), - Definition::Const(it) => name_range(it, sema), - Definition::Static(it) => name_range(it, sema), - Definition::Trait(it) => name_range(it, sema), - Definition::TraitAlias(it) => name_range(it, sema), - Definition::TypeAlias(it) => name_range(it, sema), - Definition::Local(it) => name_range(it.primary_source(sema.db), sema), + Definition::Variant(it) => name_range(it, sema).and_then(syn_ctx_is_root), + Definition::Const(it) => name_range(it, sema).and_then(syn_ctx_is_root), + Definition::Static(it) => name_range(it, sema).and_then(syn_ctx_is_root), + Definition::Trait(it) => name_range(it, sema).and_then(syn_ctx_is_root), + Definition::TraitAlias(it) => name_range(it, sema).and_then(syn_ctx_is_root), + Definition::TypeAlias(it) => name_range(it, sema).and_then(syn_ctx_is_root), + Definition::Local(it) => { + name_range(it.primary_source(sema.db), sema).and_then(syn_ctx_is_root) + } Definition::GenericParam(generic_param) => match generic_param { hir::GenericParam::LifetimeParam(lifetime_param) => { let src = lifetime_param.source(sema.db)?; - src.with_value(src.value.lifetime()?.syntax()).original_file_range_opt(sema.db) + src.with_value(src.value.lifetime()?.syntax()) + .original_file_range_opt(sema.db) + .and_then(syn_ctx_is_root) } _ => { let x = match generic_param { @@ -156,22 +167,30 @@ impl Definition { Either::Left(x) => x.name()?, Either::Right(_) => return None, }; - src.with_value(name.syntax()).original_file_range_opt(sema.db) + src.with_value(name.syntax()) + .original_file_range_opt(sema.db) + .and_then(syn_ctx_is_root) } }, Definition::Label(label) => { let src = label.source(sema.db); let lifetime = src.value.lifetime()?; - src.with_value(lifetime.syntax()).original_file_range_opt(sema.db) + src.with_value(lifetime.syntax()) + 
.original_file_range_opt(sema.db) + .and_then(syn_ctx_is_root) } Definition::ExternCrateDecl(it) => { let src = it.source(sema.db)?; if let Some(rename) = src.value.rename() { let name = rename.name()?; - src.with_value(name.syntax()).original_file_range_opt(sema.db) + src.with_value(name.syntax()) + .original_file_range_opt(sema.db) + .and_then(syn_ctx_is_root) } else { let name = src.value.name_ref()?; - src.with_value(name.syntax()).original_file_range_opt(sema.db) + src.with_value(name.syntax()) + .original_file_range_opt(sema.db) + .and_then(syn_ctx_is_root) } } Definition::BuiltinType(_) => return None, @@ -183,7 +202,10 @@ impl Definition { }; return res; - fn name_range(def: D, sema: &Semantics<'_, RootDatabase>) -> Option + fn name_range( + def: D, + sema: &Semantics<'_, RootDatabase>, + ) -> Option<(FileRange, SyntaxContextId)> where D: HasSource, D::Ast: ast::HasName, @@ -256,8 +278,10 @@ fn rename_mod( let file_id = src.file_id.original_file(sema.db); match src.value.name() { Some(name) => { - if let Some(file_range) = - src.with_value(name.syntax()).original_file_range_opt(sema.db) + if let Some(file_range) = src + .with_value(name.syntax()) + .original_file_range_opt(sema.db) + .map(TupleExt::head) { source_change.insert_source_edit( file_id, @@ -493,7 +517,12 @@ fn source_edit_from_def( for source in local.sources(sema.db) { let source = match source.source.clone().original_ast_node(sema.db) { Some(source) => source, - None => match source.source.syntax().original_file_range_opt(sema.db) { + None => match source + .source + .syntax() + .original_file_range_opt(sema.db) + .map(TupleExt::head) + { Some(FileRange { file_id: file_id2, range }) => { file_id = Some(file_id2); edit.replace(range, new_name.to_owned()); diff --git a/crates/ide-diagnostics/src/handlers/macro_error.rs b/crates/ide-diagnostics/src/handlers/macro_error.rs index 7ca0a0eab2b6f..2993950be04a0 100644 --- a/crates/ide-diagnostics/src/handlers/macro_error.rs +++ b/crates/ide-diagnostics/src/handlers/macro_error.rs @@ -60,9 +60,6 @@ macro_rules! compile_error { () => {} } #[test] fn eager_macro_concat() { - // FIXME: this is incorrectly handling `$crate`, resulting in a wrong diagnostic. - // See: https://github.com/rust-lang/rust-analyzer/issues/10300 - check_diagnostics( r#" //- /lib.rs crate:lib deps:core @@ -80,7 +77,6 @@ macro_rules! 
m { fn f() { m!(); - //^^^^ error: unresolved macro $crate::private::concat } //- /core.rs crate:core diff --git a/crates/ide-diagnostics/src/handlers/typed_hole.rs b/crates/ide-diagnostics/src/handlers/typed_hole.rs index 4e215a89d7932..ea5c7564d343f 100644 --- a/crates/ide-diagnostics/src/handlers/typed_hole.rs +++ b/crates/ide-diagnostics/src/handlers/typed_hole.rs @@ -33,7 +33,7 @@ pub(crate) fn typed_hole(ctx: &DiagnosticsContext<'_>, d: &hir::TypedHole) -> Di fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::TypedHole) -> Option> { let db = ctx.sema.db; let root = db.parse_or_expand(d.expr.file_id); - let original_range = + let (original_range, _) = d.expr.as_ref().map(|it| it.to_node(&root)).syntax().original_file_range_opt(db)?; let scope = ctx.sema.scope(d.expr.value.to_node(&root).syntax())?; let mut assists = vec![]; diff --git a/crates/ide-diagnostics/src/handlers/unresolved_module.rs b/crates/ide-diagnostics/src/handlers/unresolved_module.rs index bbbd21741e5df..234ca2a7671bd 100644 --- a/crates/ide-diagnostics/src/handlers/unresolved_module.rs +++ b/crates/ide-diagnostics/src/handlers/unresolved_module.rs @@ -152,9 +152,7 @@ mod baz {} main_node: Some( InFile { file_id: FileId( - FileId( - 0, - ), + 0, ), value: MODULE@0..8 MOD_KW@0..3 "mod" diff --git a/crates/ide/src/expand_macro.rs b/crates/ide/src/expand_macro.rs index f7c6a0139e0c8..cca3e68d50244 100644 --- a/crates/ide/src/expand_macro.rs +++ b/crates/ide/src/expand_macro.rs @@ -1,4 +1,4 @@ -use hir::{HirFileIdExt, Semantics}; +use hir::{HirFileIdExt, InFile, Semantics}; use ide_db::{ base_db::FileId, helpers::pick_best_token, syntax_helpers::insert_whitespace_into_node::insert_ws_into, RootDatabase, @@ -49,7 +49,9 @@ pub(crate) fn expand_macro(db: &RootDatabase, position: FilePosition) -> Option< let name = descended.parent_ancestors().filter_map(ast::Path::cast).last()?.to_string(); // up map out of the #[derive] expansion - let token = hir::InFile::new(hir_file, descended).upmap(db)?.value; + let InFile { file_id, value: tokens } = + hir::InFile::new(hir_file, descended).upmap_once(db)?; + let token = sema.parse_or_expand(file_id).covering_element(tokens[0]).into_token()?; let attr = token.parent_ancestors().find_map(ast::Attr::cast)?; let expansions = sema.expand_derive_macro(&attr)?; let idx = attr @@ -338,8 +340,8 @@ fn main() { expect![[r#" match_ast! { - if let Some(it) = ast::TraitDef::cast(container.clone()){} - else if let Some(it) = ast::ImplDef::cast(container.clone()){} + if let Some(it) = ast::TraitDef::cast((container).clone()){} + else if let Some(it) = ast::ImplDef::cast((container).clone()){} else { { continue diff --git a/crates/ide/src/goto_definition.rs b/crates/ide/src/goto_definition.rs index 59e8578cf154c..816f1bebee429 100644 --- a/crates/ide/src/goto_definition.rs +++ b/crates/ide/src/goto_definition.rs @@ -401,11 +401,11 @@ fn bar() { //- /lib.rs macro_rules! 
define_fn { () => (fn foo() {}) - //^^^ + } define_fn!(); - +//^^^^^^^^^^^^^ fn bar() { $0foo(); } diff --git a/crates/ide/src/goto_implementation.rs b/crates/ide/src/goto_implementation.rs index 544c6b42317eb..3593c5c7dd9c3 100644 --- a/crates/ide/src/goto_implementation.rs +++ b/crates/ide/src/goto_implementation.rs @@ -249,7 +249,7 @@ impl T for &Foo {} r#" //- minicore: copy, derive #[derive(Copy)] -//^^^^^^^^^^^^^^^ + //^^^^ struct Foo$0; "#, ); diff --git a/crates/ide/src/hover.rs b/crates/ide/src/hover.rs index e0b64fe7988e5..fc0c9ef17fa13 100644 --- a/crates/ide/src/hover.rs +++ b/crates/ide/src/hover.rs @@ -166,6 +166,7 @@ fn hover_simple( } else { sema.descend_into_macros_with_same_text(original_token.clone(), offset) }; + dbg!(&descended); let descended = || descended.iter(); let result = descended() diff --git a/crates/ide/src/inlay_hints/closure_captures.rs b/crates/ide/src/inlay_hints/closure_captures.rs index d691303c18b81..2f8b959516df8 100644 --- a/crates/ide/src/inlay_hints/closure_captures.rs +++ b/crates/ide/src/inlay_hints/closure_captures.rs @@ -2,6 +2,7 @@ //! //! Tests live in [`bind_pat`][super::bind_pat] module. use ide_db::{base_db::FileId, famous_defs::FamousDefs}; +use stdx::TupleExt; use syntax::ast::{self, AstNode}; use text_edit::{TextRange, TextSize}; @@ -73,7 +74,9 @@ pub(super) fn hints( capture.display_place(sema.db) ), None, - source.name().and_then(|name| name.syntax().original_file_range_opt(sema.db)), + source.name().and_then(|name| { + name.syntax().original_file_range_opt(sema.db).map(TupleExt::head) + }), ); acc.push(InlayHint { needs_resolve: label.needs_resolve(), diff --git a/crates/ide/src/navigation_target.rs b/crates/ide/src/navigation_target.rs index 32f211c6b289c..1e4d0e8cdc6e6 100644 --- a/crates/ide/src/navigation_target.rs +++ b/crates/ide/src/navigation_target.rs @@ -4,8 +4,8 @@ use std::fmt; use either::Either; use hir::{ - symbols::FileSymbol, AssocItem, FieldSource, HasContainer, HasSource, HirDisplay, HirFileId, - InFile, LocalSource, ModuleSource, + db::ExpandDatabase, symbols::FileSymbol, AssocItem, FieldSource, HasContainer, HasSource, + HirDisplay, HirFileId, InFile, LocalSource, ModuleSource, }; use ide_db::{ base_db::{FileId, FileRange}, @@ -40,6 +40,8 @@ pub struct NavigationTarget { /// comments, and `focus_range` is the range of the identifier. /// /// Clients should place the cursor on this range when navigating to this target. + /// + /// This range must be contained within [`Self::full_range`]. 
    pub focus_range: Option<TextRange>,
     pub name: SmolStr,
     pub kind: Option<SymbolKind>,
@@ -166,13 +168,9 @@ impl NavigationTarget {
 impl TryToNav for FileSymbol {
     fn try_to_nav(&self, db: &RootDatabase) -> Option<NavigationTarget> {
         let full_range = self.loc.original_range(db);
-        let focus_range = self.loc.original_name_range(db).and_then(|it| {
-            if it.file_id == full_range.file_id {
-                Some(it.range)
-            } else {
-                None
-            }
-        });
+        let focus_range = self.loc.original_name_range(db);
+        let focus_range =
+            if focus_range.file_id == full_range.file_id { Some(focus_range.range) } else { None };

         Some(NavigationTarget {
             file_id: full_range.file_id,
@@ -363,11 +361,11 @@ impl ToNav for hir::Module {
 impl TryToNav for hir::Impl {
     fn try_to_nav(&self, db: &RootDatabase) -> Option<NavigationTarget> {
         let InFile { file_id, value } = self.source(db)?;
-        let derive_attr = self.as_builtin_derive(db);
+        let derive_path = self.as_builtin_derive_path(db);

-        let (focus, syntax) = match &derive_attr {
-            Some(attr) => (None, attr.value.syntax()),
-            None => (value.self_ty(), value.syntax()),
+        let (file_id, focus, syntax) = match &derive_path {
+            Some(attr) => (attr.file_id.into(), None, attr.value.syntax()),
+            None => (file_id, value.self_ty(), value.syntax()),
         };

         let (file_id, full_range, focus_range) = orig_range_with_focus(db, file_id, syntax, focus);
@@ -628,19 +626,30 @@ impl TryToNav for hir::ConstParam {
     }
 }

+/// Returns the original range of the syntax node, and the range of the name mapped out of macro expansions.
+/// Additionally verifies that the name span is in bounds of and related to the original range.
 fn orig_range_with_focus(
     db: &RootDatabase,
     hir_file: HirFileId,
     value: &SyntaxNode,
     name: Option<impl AstNode>,
 ) -> (FileId, TextRange, Option<TextRange>) {
-    let FileRange { file_id, range: full_range } =
-        InFile::new(hir_file, value).original_file_range(db);
+    let FileRange { file_id, range } =
+        match InFile::new(hir_file, value).original_file_range_opt(db) {
+            Some((range, ctxt)) if ctxt.is_root() => range,
+            _ => db
+                .lookup_intern_macro_call(hir_file.macro_file().unwrap().macro_call_id)
+                .kind
+                .original_call_range(db),
+        };
     let focus_range = name
         .and_then(|it| InFile::new(hir_file, it.syntax()).original_file_range_opt(db))
-        .and_then(|range| if range.file_id == file_id { Some(range.range) } else { None });
+        .filter(|(frange, ctxt)| {
+            ctxt.is_root() && frange.file_id == file_id && range.contains_range(frange.range)
+        })
+        .map(|(frange, _ctxt)| frange.range);

-    (file_id, full_range, focus_range)
+    (file_id, range, focus_range)
 }

 #[cfg(test)]
diff --git a/crates/ide/src/static_index.rs b/crates/ide/src/static_index.rs
index 7ee50f7a67f8b..cb1236836b08f 100644
--- a/crates/ide/src/static_index.rs
+++ b/crates/ide/src/static_index.rs
@@ -242,6 +242,7 @@ mod tests {
         }
     }

+    #[track_caller]
     fn check_definitions(ra_fixture: &str) {
         let (analysis, ranges) = fixture::annotations_without_marker(ra_fixture);
         let s = StaticIndex::compute(&analysis);
diff --git a/crates/ide/src/syntax_highlighting/test_data/highlight_strings.html b/crates/ide/src/syntax_highlighting/test_data/highlight_strings.html
index 64e614cecd20f..aa79cd9b829f7 100644
--- a/crates/ide/src/syntax_highlighting/test_data/highlight_strings.html
+++ b/crates/ide/src/syntax_highlighting/test_data/highlight_strings.html
@@ -127,7 +127,7 @@
     println!("Hello {:+}!", 5);
     println!("{:#x}!", 27);
     println!("Hello {:05}!", 5);
-    println!("Hello {:05}!", -5);
+    println!("Hello {:05}!", -5);
     println!("{:#010x}!", 27);
     println!("Hello {0} is {1:.5}", "x", 0.01);
     println!("Hello {1} is {2:.0$}", 5, "x", 0.01);
diff --git 
a/crates/mbe/src/benchmark.rs b/crates/mbe/src/benchmark.rs index eb0773570b325..e7fbb918897d8 100644 --- a/crates/mbe/src/benchmark.rs +++ b/crates/mbe/src/benchmark.rs @@ -10,12 +10,12 @@ use tt::{Span, SpanAnchor, SyntaxContext}; use crate::{ parser::{MetaVarKind, Op, RepeatKind, Separator}, - syntax_node_to_token_tree, DeclarativeMacro, + syntax_node_to_token_tree, DeclarativeMacro, SpanMapper, }; type SpanData = tt::SpanData; -#[derive(PartialEq, Eq, Clone, Copy, Debug)] +#[derive(PartialEq, Eq, Clone, Copy, Debug, Hash)] struct DummyFile; impl SpanAnchor for DummyFile { const DUMMY: Self = DummyFile; @@ -27,6 +27,14 @@ impl SyntaxContext for DummyCtx { const DUMMY: Self = DummyCtx; } +struct NoOpMap; + +impl SpanMapper for NoOpMap { + fn span_for(&self, range: syntax::TextRange) -> SpanData { + SpanData { range, anchor: DummyFile, ctx: DummyCtx } + } +} + #[test] fn benchmark_parse_macro_rules() { if skip_slow_tests() { @@ -79,12 +87,7 @@ fn macro_rules_fixtures_tt() -> FxHashMap> { .filter_map(ast::MacroRules::cast) .map(|rule| { let id = rule.name().unwrap().to_string(); - let def_tt = syntax_node_to_token_tree( - rule.token_tree().unwrap().syntax(), - DummyFile, - 0.into(), - &Default::default(), - ); + let def_tt = syntax_node_to_token_tree(rule.token_tree().unwrap().syntax(), NoOpMap); (id, def_tt) }) .collect() diff --git a/crates/mbe/src/lib.rs b/crates/mbe/src/lib.rs index 482d0157b2dbd..0b8461200e633 100644 --- a/crates/mbe/src/lib.rs +++ b/crates/mbe/src/lib.rs @@ -30,12 +30,12 @@ use crate::{ // FIXME: we probably should re-think `token_tree_to_syntax_node` interfaces pub use ::parser::TopEntryPoint; -pub use tt::{Delimiter, DelimiterKind, Punct}; +pub use tt::{Delimiter, DelimiterKind, Punct, SyntaxContext}; pub use crate::{ syntax_bridge::{ map_from_syntax_node, parse_exprs_with_sep, parse_to_token_tree, syntax_node_to_token_tree, - syntax_node_to_token_tree_censored, token_tree_to_syntax_node, + syntax_node_to_token_tree_censored, token_tree_to_syntax_node, SpanMapper, }, token_map::TokenMap, }; diff --git a/crates/mbe/src/syntax_bridge.rs b/crates/mbe/src/syntax_bridge.rs index f47123336b44d..36c63b365d2c0 100644 --- a/crates/mbe/src/syntax_bridge.rs +++ b/crates/mbe/src/syntax_bridge.rs @@ -17,43 +17,52 @@ use crate::{to_parser_input::to_parser_input, tt_iter::TtIter, TokenMap}; #[cfg(test)] mod tests; +pub trait SpanMapper { + fn span_for(&self, range: TextRange) -> S; +} + +impl SpanMapper for TokenMap { + fn span_for(&self, range: TextRange) -> S { + self.span_for_range(range) + } +} + +impl> SpanMapper for &SM { + fn span_for(&self, range: TextRange) -> S { + SM::span_for(self, range) + } +} + /// Convert the syntax node to a `TokenTree` (what macro /// will consume). -/// `anchor` and `anchor_offset` are used to convert the node's spans -/// to relative spans, relative to the passed anchor. -/// `map` is used to resolve the converted spans accordingly. 
/// TODO: Flesh out the doc comment more thoroughly -pub fn syntax_node_to_token_tree( +pub fn syntax_node_to_token_tree( node: &SyntaxNode, - anchor: Anchor, - anchor_offset: TextSize, - map: &TokenMap>, + map: SpanMap, ) -> tt::Subtree> where SpanData: Span, Anchor: Copy, Ctx: SyntaxContext, + SpanMap: SpanMapper>, { - assert!(anchor_offset <= node.text_range().start()); - let mut c = Converter::new(node, anchor_offset, vec![], map); - convert_tokens(&mut c, anchor) + let mut c = Converter::new(node, vec![], map); + convert_tokens(&mut c) } -pub fn syntax_node_to_token_tree_censored( +pub fn syntax_node_to_token_tree_censored( node: &SyntaxNode, - anchor: Anchor, - anchor_offset: TextSize, - map: &TokenMap>, + map: SpanMap, censored: Vec, ) -> tt::Subtree> where + SpanMap: SpanMapper>, SpanData: Span, Anchor: Copy, Ctx: SyntaxContext, { - assert!(anchor_offset <= node.text_range().start()); - let mut c = Converter::new(node, anchor_offset, censored, map); - convert_tokens(&mut c, anchor) + let mut c = Converter::new(node, censored, map); + convert_tokens(&mut c) } // The following items are what `rustc` macro can be parsed into : @@ -113,20 +122,21 @@ where SpanData: Span, Ctx: SyntaxContext, { - let mut map = TokenMap::default(); + let mut map = TokenMap::empty(); node.descendants_with_tokens().filter_map(NodeOrToken::into_token).for_each(|t| { map.insert( t.text_range(), SpanData { range: t.text_range() - anchor_offset, anchor, ctx: Ctx::DUMMY }, ); }); + map.finish(); map } /// Convert a string to a `TokenTree` pub fn parse_to_token_tree( - text: &str, anchor: Anchor, + text: &str, ) -> Option>> where SpanData: Span, @@ -137,8 +147,8 @@ where if lexed.errors().next().is_some() { return None; } - let mut conv = RawConverter { lexed, pos: 0, _offset: TextSize::default() }; - Some(convert_tokens(&mut conv, anchor)) + let mut conv = RawConverter { lexed, pos: 0, anchor }; + Some(convert_tokens(&mut conv)) } /// Split token tree with separate expr: $($e:expr)SEP* @@ -175,10 +185,7 @@ pub fn parse_exprs_with_sep(tt: &tt::Subtree, sep: char) -> Vec( - conv: &mut C, - anchor: Anchor, -) -> tt::Subtree> +fn convert_tokens(conv: &mut C) -> tt::Subtree> where C: TokenConverter, Ctx: SyntaxContext, @@ -188,16 +195,15 @@ where let entry = tt::Subtree { delimiter: tt::Delimiter::UNSPECIFIED, token_trees: vec![] }; let mut stack = NonEmptyVec::new(entry); - while let Some((token, rel_range, abs_range)) = conv.bump() { + while let Some((token, abs_range)) = conv.bump() { let tt::Subtree { delimiter, token_trees: result } = stack.last_mut(); - let mk_dummy_span = || SpanData { range: rel_range, anchor, ctx: Ctx::DUMMY }; let kind = token.kind(conv); let tt = match kind { // Desugar doc comments into doc attributes COMMENT => { - let span = conv.span_for(abs_range).unwrap_or_else(mk_dummy_span); + let span = conv.span_for(abs_range); if let Some(tokens) = conv.convert_doc_comment(&token, span) { result.extend(tokens); } @@ -215,8 +221,7 @@ where // and end the subtree here if matches!(expected, Some(expected) if expected == kind) { if let Some(mut subtree) = stack.pop() { - subtree.delimiter.close = - conv.span_for(abs_range).unwrap_or_else(mk_dummy_span); + subtree.delimiter.close = conv.span_for(abs_range); stack.last_mut().token_trees.push(subtree.into()); } continue; @@ -231,11 +236,12 @@ where // Start a new subtree if let Some(kind) = delim { + let open = conv.span_for(abs_range); stack.push(tt::Subtree { delimiter: tt::Delimiter { - open: conv.span_for(abs_range).unwrap_or_else(mk_dummy_span), + 
open, // will be overwritten on subtree close above - close: mk_dummy_span(), + close: open, kind, }, token_trees: vec![], @@ -250,21 +256,12 @@ where let Some(char) = token.to_char(conv) else { panic!("Token from lexer must be single char: token = {token:#?}") }; - tt::Leaf::from(tt::Punct { - char, - spacing, - span: conv.span_for(abs_range).unwrap_or_else(mk_dummy_span), - }) - .into() + tt::Leaf::from(tt::Punct { char, spacing, span: conv.span_for(abs_range) }).into() } _ => { macro_rules! make_leaf { ($i:ident) => { - tt::$i { - span: conv.span_for(abs_range).unwrap_or_else(mk_dummy_span), - text: token.to_text(conv), - } - .into() + tt::$i { span: conv.span_for(abs_range), text: token.to_text(conv) }.into() }; } let leaf: tt::Leaf<_> = match kind { @@ -273,32 +270,21 @@ where UNDERSCORE => make_leaf!(Ident), k if k.is_keyword() => make_leaf!(Ident), k if k.is_literal() => make_leaf!(Literal), - // FIXME: Check whether span splitting works as intended LIFETIME_IDENT => { - let char_unit = TextSize::of('\''); - let r = TextRange::at(rel_range.start(), char_unit); let apostrophe = tt::Leaf::from(tt::Punct { char: '\'', spacing: tt::Spacing::Joint, - span: conv.span_for(abs_range).unwrap_or(SpanData { - range: r, - anchor, - ctx: Ctx::DUMMY, - }), + span: conv + .span_for(TextRange::at(abs_range.start(), TextSize::of('\''))), }); result.push(apostrophe.into()); - let r = TextRange::at( - rel_range.start() + char_unit, - rel_range.len() - char_unit, - ); let ident = tt::Leaf::from(tt::Ident { text: SmolStr::new(&token.to_text(conv)[1..]), - span: conv.span_for(abs_range).unwrap_or(SpanData { - range: r, - anchor, - ctx: Ctx::DUMMY, - }), + span: conv.span_for(TextRange::at( + abs_range.start() + TextSize::of('\''), + abs_range.end(), + )), }); result.push(ident.into()); continue; @@ -433,10 +419,10 @@ fn convert_doc_comment( } /// A raw token (straight from lexer) converter -struct RawConverter<'a> { +struct RawConverter<'a, Anchor> { lexed: parser::LexedStr<'a>, pos: usize, - _offset: TextSize, + anchor: Anchor, } trait SrcToken: std::fmt::Debug { @@ -456,28 +442,28 @@ trait TokenConverter: Sized { span: SpanData, ) -> Option>>>; - fn bump(&mut self) -> Option<(Self::Token, TextRange, TextRange)>; + fn bump(&mut self) -> Option<(Self::Token, TextRange)>; fn peek(&self) -> Option; - fn span_for(&self, range: TextRange) -> Option>; + fn span_for(&self, range: TextRange) -> SpanData; } -impl SrcToken> for usize { - fn kind(&self, ctx: &RawConverter<'_>) -> SyntaxKind { +impl SrcToken> for usize { + fn kind(&self, ctx: &RawConverter<'_, Anchor>) -> SyntaxKind { ctx.lexed.kind(*self) } - fn to_char(&self, ctx: &RawConverter<'_>) -> Option { + fn to_char(&self, ctx: &RawConverter<'_, Anchor>) -> Option { ctx.lexed.text(*self).chars().next() } - fn to_text(&self, ctx: &RawConverter<'_>) -> SmolStr { + fn to_text(&self, ctx: &RawConverter<'_, Anchor>) -> SmolStr { ctx.lexed.text(*self).into() } } -impl TokenConverter for RawConverter<'_> +impl TokenConverter for RawConverter<'_, Anchor> where SpanData: Span, { @@ -492,7 +478,7 @@ where convert_doc_comment(&doc_comment(text), span) } - fn bump(&mut self) -> Option<(Self::Token, TextRange, TextRange)> { + fn bump(&mut self) -> Option<(Self::Token, TextRange)> { if self.pos == self.lexed.len() { return None; } @@ -501,7 +487,7 @@ where let range = self.lexed.text_range(token); let range = TextRange::new(range.start.try_into().ok()?, range.end.try_into().ok()?); - Some((token, range, range)) + Some((token, range)) } fn peek(&self) -> Option { @@ 
-511,41 +497,27 @@ where Some(self.pos) } - fn span_for(&self, _: TextRange) -> Option> { - None + fn span_for(&self, range: TextRange) -> SpanData { + SpanData { range, anchor: self.anchor, ctx: Ctx::DUMMY } } } -struct Converter<'a, Anchor, Ctx> { +struct Converter { current: Option, preorder: PreorderWithTokens, range: TextRange, punct_offset: Option<(SyntaxToken, TextSize)>, /// Used to make the emitted text ranges in the spans relative to the span anchor. - offset: TextSize, - map: &'a TokenMap>, + map: SpanMap, censored: Vec, } -impl<'a, Anchor, Ctx> Converter<'a, Anchor, Ctx> { - fn new( - node: &SyntaxNode, - anchor_offset: TextSize, - censored: Vec, - map: &'a TokenMap>, - ) -> Self { +impl Converter { + fn new(node: &SyntaxNode, censored: Vec, map: SpanMap) -> Self { let range = node.text_range(); let mut preorder = node.preorder_with_tokens(); let first = Self::next_token(&mut preorder, &censored); - Converter { - current: first, - preorder, - range, - punct_offset: None, - offset: anchor_offset, - censored, - map, - } + Converter { current: first, preorder, range, punct_offset: None, censored, map } } fn next_token(preorder: &mut PreorderWithTokens, censor: &[SyntaxNode]) -> Option { @@ -577,29 +549,30 @@ impl SynToken { } } -impl SrcToken> for SynToken { - fn kind(&self, ctx: &Converter<'_, Anchor, Ctx>) -> SyntaxKind { +impl SrcToken> for SynToken { + fn kind(&self, ctx: &Converter) -> SyntaxKind { match self { SynToken::Ordinary(token) => token.kind(), SynToken::Punct(..) => SyntaxKind::from_char(self.to_char(ctx).unwrap()).unwrap(), } } - fn to_char(&self, _ctx: &Converter<'_, Anchor, Ctx>) -> Option { + fn to_char(&self, _ctx: &Converter) -> Option { match self { SynToken::Ordinary(_) => None, SynToken::Punct(it, i) => it.text().chars().nth(*i), } } - fn to_text(&self, _ctx: &Converter<'_, Anchor, Ctx>) -> SmolStr { + fn to_text(&self, _ctx: &Converter) -> SmolStr { match self { SynToken::Ordinary(token) | SynToken::Punct(token, _) => token.text().into(), } } } -impl TokenConverter for Converter<'_, Anchor, Ctx> +impl TokenConverter for Converter where SpanData: Span, + SpanMap: SpanMapper>, { type Token = SynToken; fn convert_doc_comment( @@ -610,18 +583,14 @@ where convert_doc_comment(token.token(), span) } - fn bump(&mut self) -> Option<(Self::Token, TextRange, TextRange)> { + fn bump(&mut self) -> Option<(Self::Token, TextRange)> { if let Some((punct, offset)) = self.punct_offset.clone() { if usize::from(offset) + 1 < punct.text().len() { let offset = offset + TextSize::of('.'); let range = punct.text_range(); self.punct_offset = Some((punct.clone(), offset)); let range = TextRange::at(range.start() + offset, TextSize::of('.')); - return Some(( - SynToken::Punct(punct, u32::from(offset) as usize), - range - self.offset, - range, - )); + return Some((SynToken::Punct(punct, u32::from(offset) as usize), range)); } } @@ -634,11 +603,11 @@ where self.punct_offset = Some((curr.clone(), 0.into())); let range = curr.text_range(); let range = TextRange::at(range.start(), TextSize::of('.')); - (SynToken::Punct(curr, 0 as usize), range - self.offset, range) + (SynToken::Punct(curr, 0 as usize), range) } else { self.punct_offset = None; let range = curr.text_range(); - (SynToken::Ordinary(curr), range - self.offset, range) + (SynToken::Ordinary(curr), range) }; Some(token) @@ -665,12 +634,15 @@ where Some(token) } - fn span_for(&self, range: TextRange) -> Option> { - self.map.span_for_range(range) + fn span_for(&self, range: TextRange) -> SpanData { + self.map.span_for(range) } } 
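
With this change the converters no longer fabricate dummy spans when a lookup fails: every token's span comes from whatever `SpanMapper` the converter was constructed with, and the blanket impl for references lets callers hand in either an owned map or a borrowed one. A minimal sketch of that delegation pattern follows, with `Range<u32>` standing in for `TextRange` and the span type left generic; these are simplified signatures for illustration, not the real ones.

    use std::ops::Range;

    /// A source of spans for text ranges; `S` is whatever span type the caller wants.
    trait SpanMapper<S> {
        fn span_for(&self, range: Range<u32>) -> S;
    }

    /// Blanket impl: a reference to a mapper is itself a mapper, so conversion
    /// functions can take `impl SpanMapper<S>` by value and still accept borrows.
    impl<S, SM: SpanMapper<S>> SpanMapper<S> for &SM {
        fn span_for(&self, range: Range<u32>) -> S {
            SM::span_for(self, range)
        }
    }

    /// Like the no-op maps in the tests: the "span" is just the queried range.
    struct NoOpMap;

    impl SpanMapper<Range<u32>> for NoOpMap {
        fn span_for(&self, range: Range<u32>) -> Range<u32> {
            range
        }
    }

    /// Stand-in for `syntax_node_to_token_tree`: all span bookkeeping is delegated.
    fn convert<S>(map: impl SpanMapper<S>, range: Range<u32>) -> S {
        map.span_for(range)
    }

    fn main() {
        assert_eq!(convert(NoOpMap, 1..2), 1..2);
        // Works through a reference too, thanks to the blanket impl.
        assert_eq!(convert(&NoOpMap, 3..9), 3..9);
    }

`NoOpMap` here mirrors the dummy maps the benchmark and bridge tests define in this patch: where no real file exists, echoing the queried range back is a perfectly good span.
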
-struct TtTreeSink<'a, Anchor, Ctx> { +struct TtTreeSink<'a, Anchor, Ctx> +where + SpanData: Span, +{ buf: String, cursor: Cursor<'a, SpanData>, text_pos: TextSize, @@ -688,12 +660,12 @@ where cursor, text_pos: 0.into(), inner: SyntaxTreeBuilder::default(), - token_map: TokenMap::default(), + token_map: TokenMap::empty(), } } fn finish(mut self) -> (Parse, TokenMap>) { - self.token_map.shrink_to_fit(); + self.token_map.finish(); (self.inner.finish(), self.token_map) } } @@ -825,6 +797,7 @@ where self.inner.token(kind, self.buf.as_str()); self.buf.clear(); + // FIXME: Emitting whitespace for this is really just a hack, we should get rid of it. // Add whitespace between adjoint puncts let next = last.bump(); if let ( @@ -839,6 +812,7 @@ where // need to add whitespace either. if curr.spacing == tt::Spacing::Alone && curr.char != ';' && next.char != '\'' { self.inner.token(WHITESPACE, " "); + self.token_map.insert(TextRange::at(self.text_pos, TextSize::of(' ')), curr.span); self.text_pos += TextSize::of(' '); } } diff --git a/crates/mbe/src/syntax_bridge/tests.rs b/crates/mbe/src/syntax_bridge/tests.rs index 0275e5397c9b4..2e21977f681be 100644 --- a/crates/mbe/src/syntax_bridge/tests.rs +++ b/crates/mbe/src/syntax_bridge/tests.rs @@ -7,12 +7,14 @@ use tt::{ Leaf, Punct, Spacing, SpanAnchor, SyntaxContext, }; +use crate::SpanMapper; + use super::syntax_node_to_token_tree; fn check_punct_spacing(fixture: &str) { type SpanData = tt::SpanData; - #[derive(PartialEq, Eq, Clone, Copy, Debug)] + #[derive(PartialEq, Eq, Clone, Copy, Debug, Hash)] struct DummyFile; impl SpanAnchor for DummyFile { const DUMMY: Self = DummyFile; @@ -24,9 +26,16 @@ fn check_punct_spacing(fixture: &str) { const DUMMY: Self = DummyCtx; } + struct NoOpMap; + + impl SpanMapper for NoOpMap { + fn span_for(&self, range: syntax::TextRange) -> SpanData { + SpanData { range, anchor: DummyFile, ctx: DummyCtx } + } + } + let source_file = ast::SourceFile::parse(fixture).ok().unwrap(); - let subtree = - syntax_node_to_token_tree(source_file.syntax(), DummyFile, 0.into(), &Default::default()); + let subtree = syntax_node_to_token_tree(source_file.syntax(), NoOpMap); let mut annotations: HashMap<_, _> = extract_annotations(fixture) .into_iter() .map(|(range, annotation)| { diff --git a/crates/mbe/src/token_map.rs b/crates/mbe/src/token_map.rs index dfbf54410b180..978ee268c5ccc 100644 --- a/crates/mbe/src/token_map.rs +++ b/crates/mbe/src/token_map.rs @@ -2,7 +2,7 @@ use std::hash::Hash; -use stdx::never; +use stdx::itertools::Itertools; use syntax::TextRange; use tt::Span; @@ -15,23 +15,29 @@ use tt::Span; /// Maps absolute text ranges for the corresponding file to the relevant span data. #[derive(Debug, PartialEq, Eq, Clone, Hash)] // FIXME: Rename to SpanMap -pub struct TokenMap { +pub struct TokenMap { // FIXME: This needs to be sorted by (FileId, AstId) // Then we can do a binary search on the file id, // then a bin search on the ast id pub span_map: Vec<(TextRange, S)>, // span_map2: rustc_hash::FxHashMap, - pub real_file: bool, } -impl Default for TokenMap { - fn default() -> Self { - Self { span_map: Vec::new(), real_file: true } +impl TokenMap { + pub fn empty() -> Self { + Self { span_map: Vec::new() } } -} -impl TokenMap { - pub(crate) fn shrink_to_fit(&mut self) { + pub fn finish(&mut self) { + debug_assert_eq!( + self.span_map + .iter() + .sorted_by_key(|it| (it.0.start(), it.0.end())) + .tuple_windows() + .find(|(range, next)| range.0.end() != next.0.start()), + None, + "span map has holes!" 
+ ); self.span_map.shrink_to_fit(); } @@ -40,6 +46,8 @@ impl TokenMap { } pub fn ranges_with_span(&self, span: S) -> impl Iterator + '_ { + // FIXME: linear search + // FIXME: Disregards resolving spans to get more matches! See ExpansionInfo::map_token_down self.span_map.iter().filter_map( move |(range, s)| { if s == &span { @@ -51,20 +59,31 @@ impl TokenMap { ) } - // FIXME: Should be infallible - pub fn span_for_range(&self, range: TextRange) -> Option { + // FIXME: We need APIs for fetching the span of a token as well as for a whole node. The node + // one *is* fallible though. + // Token span fetching technically only needs an offset really, as the entire file span is + // populated, where node fetching is more like fetching the spans at all source positions, and + // then we need to verify that all those positions have the same context, if not we fail! But + // how do we handle them having different span ranges? + + pub fn span_for_range(&self, range: TextRange) -> S { // TODO FIXME: make this proper self.span_map .iter() - .filter_map(|(r, s)| Some((r, s, r.intersect(range)?))) + .filter_map(|(r, s)| Some((r, s, r.intersect(range).filter(|it| !it.is_empty())?))) .max_by_key(|(_, _, intersection)| intersection.len()) - .map(|(_, &s, _)| s) - .or_else(|| { - if !self.real_file { - never!("no span for range {:?} in {:#?}", range, self.span_map); - } - None - }) + .map_or_else( + || panic!("no span for range {:?} in {:#?}", range, self.span_map), + |(_, &s, _)| s, + ) + } + + pub fn spans_for_node_range(&self, range: TextRange) -> impl Iterator + '_ { + // TODO FIXME: make this proper + self.span_map + .iter() + .filter(move |(r, _)| r.intersect(range).filter(|it| !it.is_empty()).is_some()) + .map(|&(_, s)| s) } // pub fn ranges_by_token( diff --git a/crates/proc-macro-srv/src/lib.rs b/crates/proc-macro-srv/src/lib.rs index 84bd15efb8bee..00042480e3028 100644 --- a/crates/proc-macro-srv/src/lib.rs +++ b/crates/proc-macro-srv/src/lib.rs @@ -10,7 +10,7 @@ //! * By **copying** the whole rustc `lib_proc_macro` code, we are able to build this with `stable` //! rustc rather than `unstable`. 
(Although in general ABI compatibility is still an issue)… -#![cfg(feature = "sysroot-abi")] +#![cfg(any(feature = "sysroot-abi", rust_analyzer))] #![feature(proc_macro_internals, proc_macro_diagnostic, proc_macro_span)] #![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)] #![allow(unreachable_pub)] diff --git a/crates/rust-analyzer/src/cargo_target_spec.rs b/crates/rust-analyzer/src/cargo_target_spec.rs index 4742dc1dfa79f..d679bfcb014c1 100644 --- a/crates/rust-analyzer/src/cargo_target_spec.rs +++ b/crates/rust-analyzer/src/cargo_target_spec.rs @@ -209,24 +209,26 @@ mod tests { use super::*; use cfg::CfgExpr; - use hir::HirFileId; - use ide_db::base_db::span::{SpanAnchor, SyntaxContextId, ROOT_ERASED_FILE_AST_ID}; - use mbe::syntax_node_to_token_tree; + use hir_def::tt::{self, Span}; + use mbe::{syntax_node_to_token_tree, SpanMapper}; use syntax::{ ast::{self, AstNode}, - SmolStr, TextSize, + SmolStr, }; + struct NoOpMap; + + impl SpanMapper for NoOpMap { + fn span_for(&self, _: syntax::TextRange) -> tt::SpanData { + tt::SpanData::DUMMY + } + } + fn check(cfg: &str, expected_features: &[&str]) { let cfg_expr = { let source_file = ast::SourceFile::parse(cfg).ok().unwrap(); let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap(); - let tt = syntax_node_to_token_tree::<_, SyntaxContextId>( - tt.syntax(), - SpanAnchor { file_id: HirFileId::from(0), ast_id: ROOT_ERASED_FILE_AST_ID }, - TextSize::new(0), - &Default::default(), - ); + let tt = syntax_node_to_token_tree(tt.syntax(), &NoOpMap); CfgExpr::parse(&tt) }; diff --git a/crates/stdx/Cargo.toml b/crates/stdx/Cargo.toml index 35986799fdd4d..ea6c11ac0d56a 100644 --- a/crates/stdx/Cargo.toml +++ b/crates/stdx/Cargo.toml @@ -17,6 +17,7 @@ always-assert = { version = "0.1.2", features = ["log"] } jod-thread = "0.1.2" libc.workspace = true crossbeam-channel = "0.5.5" +itertools.workspace = true # Think twice before adding anything here [target.'cfg(windows)'.dependencies] diff --git a/crates/stdx/src/lib.rs b/crates/stdx/src/lib.rs index f26c7f6dfc2d5..af7846d49c64d 100644 --- a/crates/stdx/src/lib.rs +++ b/crates/stdx/src/lib.rs @@ -15,6 +15,7 @@ pub mod thread; pub mod anymap; pub use always_assert::{always, never}; +pub use itertools; #[inline(always)] pub fn is_ci() -> bool { @@ -40,6 +41,24 @@ Uncomment `default = [ "backtrace" ]` in `crates/stdx/Cargo.toml`. ); } +pub trait TupleExt { + type Head; + type Tail; + fn head(self) -> Self::Head; + fn tail(self) -> Self::Tail; +} + +impl TupleExt for (T, U) { + type Head = T; + type Tail = U; + fn head(self) -> Self::Head { + self.0 + } + fn tail(self) -> Self::Tail { + self.1 + } +} + pub fn to_lower_snake_case(s: &str) -> String { to_snake_case(s, char::to_lowercase) } diff --git a/crates/tt/src/lib.rs b/crates/tt/src/lib.rs index 73e75a051bd66..1374ae1a6fc61 100644 --- a/crates/tt/src/lib.rs +++ b/crates/tt/src/lib.rs @@ -45,20 +45,32 @@ pub struct SpanData { } impl Span for SpanData { + type Anchor = Anchor; const DUMMY: Self = SpanData { range: TextRange::empty(TextSize::new(0)), anchor: Anchor::DUMMY, ctx: Ctx::DUMMY, }; + fn anchor(self) -> Self::Anchor { + self.anchor + } + fn mk(anchor: Self::Anchor, range: TextRange) -> Self { + SpanData { anchor, range, ctx: Ctx::DUMMY } + } } -pub trait SpanAnchor: std::fmt::Debug + Copy + Sized + Eq { +pub trait SpanAnchor: + std::fmt::Debug + Copy + Sized + Eq + Copy + fmt::Debug + std::hash::Hash +{ const DUMMY: Self; } // FIXME: Get rid of this trait? 
pub trait Span: std::fmt::Debug + Copy + Sized + Eq { const DUMMY: Self; + type Anchor: Copy + fmt::Debug + Eq + std::hash::Hash; + fn anchor(self) -> Self::Anchor; + fn mk(anchor: Self::Anchor, range: TextRange) -> Self; } pub trait SyntaxContext: std::fmt::Debug + Copy + Sized + Eq { From c43078f99d93af2bdc8f799ae77e10127aba39ca Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Sat, 25 Nov 2023 14:39:55 +0100 Subject: [PATCH 10/80] Re-implement InFile wrappers as type aliases over generic InFileWrapper --- crates/base-db/src/span.rs | 20 +-- .../hir-def/src/macro_expansion_tests/mod.rs | 4 +- crates/hir-expand/src/db.rs | 8 +- crates/hir-expand/src/files.rs | 155 +++++++++++------- crates/hir-expand/src/lib.rs | 10 +- crates/hir/src/lib.rs | 6 +- crates/hir/src/semantics.rs | 12 +- crates/hir/src/symbols.rs | 5 +- .../src/handlers/generate_enum_variant.rs | 6 +- crates/ide-db/src/rename.rs | 2 +- crates/ide/src/annotations.rs | 6 +- crates/ide/src/status.rs | 10 +- crates/proc-macro-api/src/msg/flat.rs | 4 +- 13 files changed, 146 insertions(+), 102 deletions(-) diff --git a/crates/base-db/src/span.rs b/crates/base-db/src/span.rs index a78f558759b00..e183e9b19965c 100644 --- a/crates/base-db/src/span.rs +++ b/crates/base-db/src/span.rs @@ -102,7 +102,7 @@ impl fmt::Debug for HirFileId { } #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] -pub struct MacroFile { +pub struct MacroFileId { pub macro_call_id: MacroCallId, } /// `MacroCallId` identifies a particular macro invocation, like @@ -113,18 +113,18 @@ crate::impl_intern_key!(MacroCallId); impl MacroCallId { pub fn as_file(self) -> HirFileId { - MacroFile { macro_call_id: self }.into() + MacroFileId { macro_call_id: self }.into() } - pub fn as_macro_file(self) -> MacroFile { - MacroFile { macro_call_id: self } + pub fn as_macro_file(self) -> MacroFileId { + MacroFileId { macro_call_id: self } } } #[derive(Clone, Copy, PartialEq, Eq, Hash)] pub enum HirFileIdRepr { FileId(FileId), - MacroFile(MacroFile), + MacroFile(MacroFileId), } impl fmt::Debug for HirFileIdRepr { @@ -145,8 +145,8 @@ impl From for HirFileId { } } -impl From for HirFileId { - fn from(MacroFile { macro_call_id: MacroCallId(id) }: MacroFile) -> Self { +impl From for HirFileId { + fn from(MacroFileId { macro_call_id: MacroCallId(id) }: MacroFileId) -> Self { let id = id.as_u32(); assert!(id < Self::MAX_FILE_ID); HirFileId(id | Self::MACRO_FILE_TAG_MASK) @@ -163,10 +163,10 @@ impl HirFileId { } #[inline] - pub fn macro_file(self) -> Option { + pub fn macro_file(self) -> Option { match self.0 & Self::MACRO_FILE_TAG_MASK { 0 => None, - _ => Some(MacroFile { + _ => Some(MacroFileId { macro_call_id: MacroCallId(InternId::from(self.0 ^ Self::MACRO_FILE_TAG_MASK)), }), } @@ -184,7 +184,7 @@ impl HirFileId { pub fn repr(self) -> HirFileIdRepr { match self.0 & Self::MACRO_FILE_TAG_MASK { 0 => HirFileIdRepr::FileId(FileId(self.0)), - _ => HirFileIdRepr::MacroFile(MacroFile { + _ => HirFileIdRepr::MacroFile(MacroFileId { macro_call_id: MacroCallId(InternId::from(self.0 ^ Self::MACRO_FILE_TAG_MASK)), }), } diff --git a/crates/hir-def/src/macro_expansion_tests/mod.rs b/crates/hir-def/src/macro_expansion_tests/mod.rs index 27ec63d171b07..355b82a5f42c1 100644 --- a/crates/hir-def/src/macro_expansion_tests/mod.rs +++ b/crates/hir-def/src/macro_expansion_tests/mod.rs @@ -18,7 +18,7 @@ use std::{iter, ops::Range, sync}; use base_db::{fixture::WithFixture, ProcMacro, SourceDatabase}; use expect_test::Expect; -use hir_expand::{db::ExpandDatabase, span::SpanMapRef, HirFileIdExt, InFile, 
MacroFile}; +use hir_expand::{db::ExpandDatabase, span::SpanMapRef, HirFileIdExt, InFile, MacroFileId}; use stdx::format_to; use syntax::{ ast::{self, edit::IndentLevel}, @@ -94,7 +94,7 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream }) .unwrap(); let macro_call_id = res.value.unwrap(); - let macro_file = MacroFile { macro_call_id }; + let macro_file = MacroFileId { macro_call_id }; let mut expansion_result = db.parse_macro_expansion(macro_file); expansion_result.err = expansion_result.err.or(res.err); expansions.push((macro_call.value.clone(), expansion_result)); diff --git a/crates/hir-expand/src/db.rs b/crates/hir-expand/src/db.rs index 393e391f05170..e31034884c5ed 100644 --- a/crates/hir-expand/src/db.rs +++ b/crates/hir-expand/src/db.rs @@ -24,7 +24,7 @@ use crate::{ span::{RealSpanMap, SpanMap, SpanMapRef}, tt, AstId, BuiltinAttrExpander, BuiltinDeriveExpander, BuiltinFnLikeExpander, EagerCallInfo, ExpandError, ExpandResult, ExpandTo, ExpansionSpanMap, HirFileId, HirFileIdRepr, MacroCallId, - MacroCallKind, MacroCallLoc, MacroDefId, MacroDefKind, MacroFile, ProcMacroExpander, + MacroCallKind, MacroCallLoc, MacroDefId, MacroDefKind, MacroFileId, ProcMacroExpander, }; /// Total limit on the number of tokens produced by any macro invocation. @@ -102,7 +102,7 @@ pub trait ExpandDatabase: SourceDatabase { // This query is LRU cached fn parse_macro_expansion( &self, - macro_file: MacroFile, + macro_file: MacroFileId, ) -> ExpandResult<(Parse, Arc)>; #[salsa::transparent] fn span_map(&self, file_id: HirFileId) -> SpanMap; @@ -307,7 +307,7 @@ fn parse_or_expand_with_err( fn parse_macro_expansion( db: &dyn ExpandDatabase, - macro_file: MacroFile, + macro_file: MacroFileId, ) -> ExpandResult<(Parse, Arc)> { let _p = profile::span("parse_macro_expansion"); let mbe::ValueResult { value: tt, err } = db.macro_expand(macro_file.macro_call_id); @@ -326,7 +326,7 @@ fn parse_macro_expansion_error( db: &dyn ExpandDatabase, macro_call_id: MacroCallId, ) -> ExpandResult> { - db.parse_macro_expansion(MacroFile { macro_call_id }) + db.parse_macro_expansion(MacroFileId { macro_call_id }) .map(|it| it.0.errors().to_vec().into_boxed_slice()) } diff --git a/crates/hir-expand/src/files.rs b/crates/hir-expand/src/files.rs index 45875d949817e..57a7fa5ec398f 100644 --- a/crates/hir-expand/src/files.rs +++ b/crates/hir-expand/src/files.rs @@ -1,15 +1,14 @@ use std::iter; use base_db::{ - span::{HirFileId, HirFileIdRepr, MacroFile, SyntaxContextId}, - FileRange, + span::{HirFileId, HirFileIdRepr, MacroFileId, SyntaxContextId}, + FileId, FileRange, }; use either::Either; use syntax::{AstNode, SyntaxNode, SyntaxToken, TextRange}; use crate::{db, ExpansionInfo, HirFileIdExt as _}; -// FIXME: Make an InRealFile wrapper /// `InFile` stores a value of `T` inside a particular file/syntax tree. 
/// /// Typical usages are: @@ -18,55 +17,91 @@ use crate::{db, ExpansionInfo, HirFileIdExt as _}; /// * `InFile` -- ast node in a file /// * `InFile` -- offset in a file #[derive(Debug, PartialEq, Eq, Clone, Copy, Hash)] -pub struct InFile { - pub file_id: HirFileId, +pub struct InFileWrapper { + pub file_id: FileKind, pub value: T, } +pub type InFile = InFileWrapper; +pub type InMacroFile = InFileWrapper; +pub type InRealFile = InFileWrapper; -impl InFile { - pub fn new(file_id: HirFileId, value: T) -> InFile { - InFile { file_id, value } +impl InFileWrapper { + pub fn new(file_id: FileKind, value: T) -> Self { + Self { file_id, value } } - pub fn with_value(&self, value: U) -> InFile { - InFile::new(self.file_id, value) + pub fn map U, U>(self, f: F) -> InFileWrapper { + InFileWrapper::new(self.file_id, f(self.value)) } +} - pub fn map U, U>(self, f: F) -> InFile { - InFile::new(self.file_id, f(self.value)) +impl InFileWrapper { + pub fn with_value(&self, value: U) -> InFileWrapper { + InFileWrapper::new(self.file_id, value) } - pub fn as_ref(&self) -> InFile<&T> { + pub fn as_ref(&self) -> InFileWrapper { self.with_value(&self.value) } +} - pub fn file_syntax(&self, db: &dyn db::ExpandDatabase) -> SyntaxNode { - db.parse_or_expand(self.file_id) +impl InFileWrapper { + pub fn cloned(&self) -> InFileWrapper { + self.with_value(self.value.clone()) } } -impl InFile<&T> { - pub fn cloned(&self) -> InFile { - self.with_value(self.value.clone()) +impl From> for InFile { + fn from(InMacroFile { file_id, value }: InMacroFile) -> Self { + InFile { file_id: file_id.into(), value } } } -impl InFile> { - pub fn transpose(self) -> Option> { - let value = self.value?; - Some(InFile::new(self.file_id, value)) +impl From> for InFile { + fn from(InRealFile { file_id, value }: InRealFile) -> Self { + InFile { file_id: file_id.into(), value } } } -impl InFile> { - pub fn transpose(self) -> Either, InFile> { +// region:transpose impls + +impl InFileWrapper> { + pub fn transpose(self) -> Option> { + Some(InFileWrapper::new(self.file_id, self.value?)) + } +} + +impl InFileWrapper> { + pub fn transpose(self) -> Either, InFileWrapper> { match self.value { - Either::Left(l) => Either::Left(InFile::new(self.file_id, l)), - Either::Right(r) => Either::Right(InFile::new(self.file_id, r)), + Either::Left(l) => Either::Left(InFileWrapper::new(self.file_id, l)), + Either::Right(r) => Either::Right(InFileWrapper::new(self.file_id, r)), } } } +// endregion:transpose impls + +// region:specific impls + +impl InFile { + pub fn file_syntax(&self, db: &dyn db::ExpandDatabase) -> SyntaxNode { + db.parse_or_expand(self.file_id) + } +} + +impl InRealFile { + pub fn file_syntax(&self, db: &dyn db::ExpandDatabase) -> SyntaxNode { + db.parse(self.file_id).syntax_node() + } +} + +impl InMacroFile { + pub fn file_syntax(&self, db: &dyn db::ExpandDatabase) -> SyntaxNode { + db.parse_macro_expansion(self.file_id).value.0.syntax_node() + } +} + impl InFile<&SyntaxNode> { pub fn ancestors_with_macros( self, @@ -159,11 +194,17 @@ impl InFile<&SyntaxNode> { } } - pub fn original_syntax_node(self, db: &dyn db::ExpandDatabase) -> Option> { + pub fn original_syntax_node( + self, + db: &dyn db::ExpandDatabase, + ) -> Option> { // This kind of upmapping can only be achieved in attribute expanded files, // as we don't have node inputs otherwise and therefore can't find an `N` node in the input - let Some(file_id) = self.file_id.macro_file() else { - return Some(self.map(Clone::clone)); + let file_id = match self.file_id.repr() { + 
HirFileIdRepr::FileId(file_id) => {
+                return Some(InRealFile { file_id, value: self.value.clone() })
+            }
+            HirFileIdRepr::MacroFile(m) => m,
         };
         if !self.file_id.is_attr_macro(db) {
             return None;
@@ -182,7 +223,7 @@ impl InFile<&SyntaxNode> {
         let kind = self.value.kind();
         // FIXME: This heuristic is brittle and with the right macro may select completely unrelated nodes?
         let value = anc.ancestors().find(|it| it.kind() == kind)?;
-        Some(InFile::new(file_id.into(), value))
+        Some(InRealFile::new(file_id, value))
     }
 }
@@ -230,29 +271,11 @@ impl InFile {
     }
 }

-#[derive(Debug, PartialEq, Eq, Clone, Copy, Hash)]
-pub struct InMacroFile<T> {
-    pub file_id: MacroFile,
-    pub value: T,
-}
-
-impl<T> From<InMacroFile<T>> for InFile<T> {
-    fn from(macro_file: InMacroFile<T>) -> Self {
-        InFile { file_id: macro_file.file_id.into(), value: macro_file.value }
-    }
-}
-
-pub fn ascend_range_up_macros(
-    db: &dyn db::ExpandDatabase,
-    range: InFile<TextRange>,
-) -> (FileRange, SyntaxContextId) {
-    match range.file_id.repr() {
-        HirFileIdRepr::FileId(file_id) => {
-            (FileRange { file_id, range: range.value }, SyntaxContextId::ROOT)
-        }
-        HirFileIdRepr::MacroFile(m) => {
-            ExpansionInfo::new(db, m).map_token_range_up(db, range.value)
-        }
+impl InFile<TextRange> {
+    /// Attempts to map the syntax node back up its macro calls.
+    pub fn original_file_range(self, db: &dyn db::ExpandDatabase) -> FileRange {
+        let (range, _ctxt) = ascend_range_up_macros(db, self);
+        range
     }
 }
@@ -261,12 +284,14 @@ impl<N: AstNode> InFile<N> {
         self.value.syntax().descendants().filter_map(T::cast).map(move |n| self.with_value(n))
     }

-    // FIXME: this should return `Option>`
-    pub fn original_ast_node(self, db: &dyn db::ExpandDatabase) -> Option<InFile<N>> {
+    pub fn original_ast_node(self, db: &dyn db::ExpandDatabase) -> Option<InRealFile<N>> {
         // This kind of upmapping can only be achieved in attribute expanded files,
         // as we don't have node inputs otherwise and therefore can't find an `N` node in the input
-        let Some(file_id) = self.file_id.macro_file() else {
-            return Some(self);
+        let file_id = match self.file_id.repr() {
+            HirFileIdRepr::FileId(file_id) => {
+                return Some(InRealFile { file_id, value: self.value })
+            }
+            HirFileIdRepr::MacroFile(m) => m,
         };
         if !self.file_id.is_attr_macro(db) {
             return None;
@@ -284,10 +309,24 @@ impl<N: AstNode> InFile<N> {
         // FIXME: This heuristic is brittle and with the right macro may select completely unrelated nodes?
         let anc = db.parse(file_id).syntax_node().covering_element(range);
         let value = anc.ancestors().find_map(N::cast)?;
-        return Some(InFile::new(file_id.into(), value));
+        Some(InRealFile::new(file_id, value))
     }

     pub fn syntax(&self) -> InFile<&SyntaxNode> {
         self.with_value(self.value.syntax())
     }
 }
+
+fn ascend_range_up_macros(
+    db: &dyn db::ExpandDatabase,
+    range: InFile<TextRange>,
+) -> (FileRange, SyntaxContextId) {
+    match range.file_id.repr() {
+        HirFileIdRepr::FileId(file_id) => {
+            (FileRange { file_id, range: range.value }, SyntaxContextId::ROOT)
+        }
+        HirFileIdRepr::MacroFile(m) => {
+            ExpansionInfo::new(db, m).map_token_range_up(db, range.value)
+        }
+    }
+}
diff --git a/crates/hir-expand/src/lib.rs b/crates/hir-expand/src/lib.rs
index d2e5e7c3643f8..13eb90b18a8e0 100644
--- a/crates/hir-expand/src/lib.rs
+++ b/crates/hir-expand/src/lib.rs
@@ -48,9 +48,9 @@ use crate::{
 };

 pub use crate::ast_id_map::{AstId, ErasedAstId, ErasedFileAstId};
-pub use crate::files::{InFile, InMacroFile};
+pub use crate::files::{InFile, InMacroFile, InRealFile};

-pub use base_db::span::{HirFileId, MacroCallId, MacroFile};
+pub use base_db::span::{HirFileId, MacroCallId, MacroFileId};
 pub use mbe::ValueResult;

 pub type DeclarativeMacro = ::mbe::DeclarativeMacro<tt::SpanData>;
@@ -206,7 +206,7 @@ impl HirFileIdExt for HirFileId {
         loop {
             match file_id.repr() {
                 HirFileIdRepr::FileId(id) => break id,
-                HirFileIdRepr::MacroFile(MacroFile { macro_call_id }) => {
+                HirFileIdRepr::MacroFile(MacroFileId { macro_call_id }) => {
                     let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_call_id);
                     let is_include_expansion = loc.def.is_include() && loc.eager.is_some();
                     file_id = match is_include_expansion.then(|| db.include_expand(macro_call_id)) {
@@ -241,7 +241,7 @@ impl HirFileIdExt for HirFileId {
         loop {
             match call.file_id.repr() {
                 HirFileIdRepr::FileId(file_id) => break Some((file_id, call.value)),
-                HirFileIdRepr::MacroFile(MacroFile { macro_call_id }) => {
+                HirFileIdRepr::MacroFile(MacroFileId { macro_call_id }) => {
                     call = db.lookup_intern_macro_call(macro_call_id).to_node(db);
                 }
             }
@@ -700,7 +700,7 @@ impl ExpansionInfo {
         }
     }

-    pub fn new(db: &dyn db::ExpandDatabase, macro_file: MacroFile) -> ExpansionInfo {
+    pub fn new(db: &dyn db::ExpandDatabase, macro_file: MacroFileId) -> ExpansionInfo {
         let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id);

         let arg_tt = loc.kind.arg(db);
diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs
index 443c19af82d3a..aacc3b08cb79b 100644
--- a/crates/hir/src/lib.rs
+++ b/crates/hir/src/lib.rs
@@ -59,7 +59,7 @@ use hir_def::{
     Lookup, MacroExpander, MacroId, ModuleId, StaticId, StructId, TraitAliasId, TraitId,
     TypeAliasId, TypeOrConstParamId, TypeParamId, UnionId,
 };
-use hir_expand::{name::name, InMacroFile, MacroCallKind};
+use hir_expand::{name::name, MacroCallKind};
 use hir_ty::{
     all_super_traits, autoderef, check_orphan_rules,
     consteval::{try_const_usize, unknown_const_as_generic, ConstEvalError, ConstExt},
@@ -124,7 +124,7 @@ pub use {
     hir_expand::{
         attrs::{Attr, AttrId},
         name::{known, Name},
-        tt, ExpandResult, HirFileId, HirFileIdExt, InFile, MacroFile,
+        tt, ExpandResult, HirFileId, HirFileIdExt, InFile, InMacroFile, InRealFile, MacroFileId,
     },
     hir_ty::{
         display::{ClosureStyle, HirDisplay, HirDisplayError, HirWrite},
@@ -3505,7 +3505,7 @@ impl Impl {
             }
             _ => return None,
         };
-        let file_id = MacroFile { macro_call_id: derive_attr };
+        let file_id = MacroFileId { macro_call_id: derive_attr };
         let path = db
             .parse_macro_expansion(file_id)
             .value
diff --git a/crates/hir/src/semantics.rs b/crates/hir/src/semantics.rs
index 137cffa7af150..e124e14a5422b 100644
--- a/crates/hir/src/semantics.rs
+++ b/crates/hir/src/semantics.rs
@@ -15,7 +15,9 @@ use hir_def::{
     type_ref::Mutability,
     AsMacroCall, DefWithBodyId, FieldId, FunctionId, MacroId, TraitId, VariantId,
 };
-use hir_expand::{db::ExpandDatabase, name::AsName, ExpansionInfo, HirFileIdExt, MacroCallId};
+use hir_expand::{
+    db::ExpandDatabase, files::InRealFile, name::AsName, ExpansionInfo, HirFileIdExt, MacroCallId,
+};
 use itertools::Itertools;
 use rustc_hash::{FxHashMap, FxHashSet};
 use smallvec::{smallvec, SmallVec};
@@ -756,8 +758,8 @@ impl<'db> SemanticsImpl<'db> {
     /// This only works for attribute expansions, as other ones do not have nodes as input.
     pub fn original_ast_node<N: AstNode>(&self, node: N) -> Option<N> {
         self.wrap_node_infile(node).original_ast_node(self.db.upcast()).map(
-            |InFile { file_id, value }| {
-                self.cache(find_root(value.syntax()), file_id);
+            |InRealFile { file_id, value }| {
+                self.cache(find_root(value.syntax()), file_id.into());
                 value
             },
         )
@@ -768,8 +770,8 @@ impl<'db> SemanticsImpl<'db> {
     pub fn original_syntax_node(&self, node: &SyntaxNode) -> Option<SyntaxNode> {
         let InFile { file_id, .. } = self.find_file(node);
         InFile::new(file_id, node).original_syntax_node(self.db.upcast()).map(
-            |InFile { file_id, value }| {
-                self.cache(find_root(&value), file_id);
+            |InRealFile { file_id, value }| {
+                self.cache(find_root(&value), file_id.into());
                 value
             },
         )
diff --git a/crates/hir/src/symbols.rs b/crates/hir/src/symbols.rs
index 6ea926cc22d8c..a392070fd88e2 100644
--- a/crates/hir/src/symbols.rs
+++ b/crates/hir/src/symbols.rs
@@ -7,7 +7,7 @@ use hir_def::{
     AdtId, AssocItemId, DefWithBodyId, HasModule, ImplId, Lookup, MacroId, ModuleDefId, ModuleId,
     TraitId,
 };
-use hir_expand::{files::ascend_range_up_macros, HirFileId, InFile};
+use hir_expand::{HirFileId, InFile};
 use hir_ty::db::HirDatabase;
 use syntax::{ast::HasName, AstNode, SmolStr, SyntaxNode, SyntaxNodePtr};
@@ -51,8 +51,7 @@ impl DeclarationLocation {
     }

     pub fn original_name_range(&self, db: &dyn HirDatabase) -> FileRange {
-        let mapping = InFile::new(self.hir_file_id, self.name_ptr.text_range());
-        ascend_range_up_macros(db.upcast(), mapping).0
+        InFile::new(self.hir_file_id, self.name_ptr.text_range()).original_file_range(db.upcast())
     }
 }
diff --git a/crates/ide-assists/src/handlers/generate_enum_variant.rs b/crates/ide-assists/src/handlers/generate_enum_variant.rs
index be7a5e6c8bccc..1a1e992e28a48 100644
--- a/crates/ide-assists/src/handlers/generate_enum_variant.rs
+++ b/crates/ide-assists/src/handlers/generate_enum_variant.rs
@@ -1,4 +1,4 @@
-use hir::{HasSource, HirDisplay, HirFileIdExt, InFile};
+use hir::{HasSource, HirDisplay, InRealFile};
 use ide_db::assists::{AssistId, AssistKind};
 use syntax::{
     ast::{self, make, HasArgList},
@@ -114,14 +114,14 @@ fn add_variant_to_accumulator(
     parent: PathParent,
 ) -> Option<()> {
     let db = ctx.db();
-    let InFile { file_id, value: enum_node } = adt.source(db)?.original_ast_node(db)?;
+    let InRealFile { file_id, value: enum_node } = adt.source(db)?.original_ast_node(db)?;

     acc.add(
         AssistId("generate_enum_variant", AssistKind::Generate),
         "Generate variant",
         target,
         |builder| {
-            builder.edit_file(file_id.original_file(db));
+            builder.edit_file(file_id);
             let node = builder.make_mut(enum_node);
             let variant = make_variant(ctx, name_ref, parent);
             node.variant_list().map(|it| it.add_variant(variant.clone_for_update()));
diff --git a/crates/ide-db/src/rename.rs b/crates/ide-db/src/rename.rs
index 
9f101468a3500..676d286b8dcc0 100644 --- a/crates/ide-db/src/rename.rs +++ b/crates/ide-db/src/rename.rs @@ -533,7 +533,7 @@ fn source_edit_from_def( } }, }; - file_id = source.file_id.file_id(); + file_id = Some(source.file_id); if let Either::Left(pat) = source.value { let name_range = pat.name().unwrap().syntax().text_range(); // special cases required for renaming fields/locals in Record patterns diff --git a/crates/ide/src/annotations.rs b/crates/ide/src/annotations.rs index fb79b5dc211a4..d7f82b4af3e10 100644 --- a/crates/ide/src/annotations.rs +++ b/crates/ide/src/annotations.rs @@ -1,4 +1,4 @@ -use hir::{HasSource, InFile, Semantics}; +use hir::{HasSource, InFile, InRealFile, Semantics}; use ide_db::{ base_db::{FileId, FilePosition, FileRange}, defs::Definition, @@ -149,8 +149,8 @@ pub(crate) fn annotations( node: InFile, source_file_id: FileId, ) -> Option<(TextRange, Option)> { - if let Some(InFile { file_id, value }) = node.original_ast_node(db) { - if file_id == source_file_id.into() { + if let Some(InRealFile { file_id, value }) = node.original_ast_node(db) { + if file_id == source_file_id { return Some(( value.syntax().text_range(), value.name().map(|name| name.syntax().text_range()), diff --git a/crates/ide/src/status.rs b/crates/ide/src/status.rs index c9ee460a1c261..e7f97ebe6f7bd 100644 --- a/crates/ide/src/status.rs +++ b/crates/ide/src/status.rs @@ -2,7 +2,7 @@ use std::{fmt, marker::PhantomData}; use hir::{ db::{AstIdMapQuery, AttrsQuery, BlockDefMapQuery, ParseMacroExpansionQuery}, - Attr, Attrs, ExpandResult, MacroFile, Module, + Attr, Attrs, ExpandResult, MacroFileId, Module, }; use ide_db::{ base_db::{ @@ -199,8 +199,12 @@ impl StatCollect> for SyntaxTreeStats { } } -impl StatCollect, M)>> for SyntaxTreeStats { - fn collect_entry(&mut self, _: MacroFile, value: Option, M)>>) { +impl StatCollect, M)>> for SyntaxTreeStats { + fn collect_entry( + &mut self, + _: MacroFileId, + value: Option, M)>>, + ) { self.total += 1; self.retained += value.is_some() as usize; } diff --git a/crates/proc-macro-api/src/msg/flat.rs b/crates/proc-macro-api/src/msg/flat.rs index 22d9f3952cc28..f29aac5295682 100644 --- a/crates/proc-macro-api/src/msg/flat.rs +++ b/crates/proc-macro-api/src/msg/flat.rs @@ -88,7 +88,7 @@ pub struct FlatTree { } #[derive(Serialize, Deserialize, Debug)] -pub struct SpanMap { +struct SpanMap { #[serde(skip_serializing)] serialize: bool, span_size: u32, @@ -122,7 +122,7 @@ impl SpanMap { } impl SpanMap { - pub fn do_serialize(&self) -> bool { + fn do_serialize(&self) -> bool { self.serialize } } From 6208960c4893ef43ea432a1eb28c5a28f754b049 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Sat, 25 Nov 2023 15:10:31 +0100 Subject: [PATCH 11/80] Deduplicate dummy test span maps --- crates/base-db/src/input.rs | 7 +- crates/base-db/src/lib.rs | 13 ++- crates/base-db/src/span.rs | 6 +- crates/cfg/src/tests.rs | 30 ++----- crates/mbe/src/benchmark.rs | 80 +++++++++---------- crates/mbe/src/lib.rs | 2 + crates/mbe/src/syntax_bridge.rs | 28 +++++++ crates/mbe/src/syntax_bridge/tests.rs | 32 +------- crates/proc-macro-srv-cli/src/main.rs | 2 + crates/proc-macro-srv/src/lib.rs | 1 + crates/rust-analyzer/src/cargo_target_spec.rs | 13 +-- crates/tt/src/lib.rs | 24 +----- 12 files changed, 99 insertions(+), 139 deletions(-) diff --git a/crates/base-db/src/input.rs b/crates/base-db/src/input.rs index 2fa5c25c91e5d..0b04a91f626b1 100644 --- a/crates/base-db/src/input.rs +++ b/crates/base-db/src/input.rs @@ -243,6 +243,9 @@ impl CrateDisplayName { } } +// FIXME: These should not 
be defined in here? Why does base db know about proc-macros +// ProcMacroKind is used in [`fixture`], but that module probably shouldn't be in this crate either. + #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] pub struct ProcMacroId(pub u32); @@ -324,7 +327,9 @@ pub struct CrateData { pub dependencies: Vec, pub origin: CrateOrigin, pub is_proc_macro: bool, - // FIXME: These things should not be per crate! These are more per workspace crate graph level things + // FIXME: These things should not be per crate! These are more per workspace crate graph level + // things. This info does need to be somewhat present though as to prevent deduplication from + // happening across different workspaces with different layouts. pub target_layout: TargetLayoutLoadResult, pub channel: Option, } diff --git a/crates/base-db/src/lib.rs b/crates/base-db/src/lib.rs index 6dc1629c3be9b..38a2641230b76 100644 --- a/crates/base-db/src/lib.rs +++ b/crates/base-db/src/lib.rs @@ -4,6 +4,7 @@ mod input; mod change; +// FIXME: Is this purely a test util mod? Consider #[cfg(test)] gating it. pub mod fixture; pub mod span; @@ -13,14 +14,13 @@ use rustc_hash::FxHashSet; use syntax::{ast, Parse, SourceFile, TextRange, TextSize}; use triomphe::Arc; -pub use crate::input::DependencyKind; pub use crate::{ change::Change, input::{ CrateData, CrateDisplayName, CrateGraph, CrateId, CrateName, CrateOrigin, Dependency, - Edition, Env, LangCrateOrigin, ProcMacro, ProcMacroExpander, ProcMacroExpansionError, - ProcMacroId, ProcMacroKind, ProcMacroLoadResult, ProcMacroPaths, ProcMacros, - ReleaseChannel, SourceRoot, SourceRootId, TargetLayoutLoadResult, + DependencyKind, Edition, Env, LangCrateOrigin, ProcMacro, ProcMacroExpander, + ProcMacroExpansionError, ProcMacroId, ProcMacroKind, ProcMacroLoadResult, ProcMacroPaths, + ProcMacros, ReleaseChannel, SourceRoot, SourceRootId, TargetLayoutLoadResult, }, }; pub use salsa::{self, Cancelled}; @@ -69,8 +69,7 @@ pub trait FileLoader { /// model. Everything else in rust-analyzer is derived from these queries. #[salsa::query_group(SourceDatabaseStorage)] pub trait SourceDatabase: FileLoader + std::fmt::Debug { - // Parses the file into the syntax tree. - #[salsa::invoke(parse_query)] + /// Parses the file into the syntax tree. fn parse(&self, file_id: FileId) -> Parse; /// The crate graph. @@ -82,7 +81,7 @@ pub trait SourceDatabase: FileLoader + std::fmt::Debug { fn proc_macros(&self) -> Arc; } -fn parse_query(db: &dyn SourceDatabase, file_id: FileId) -> Parse { +fn parse(db: &dyn SourceDatabase, file_id: FileId) -> Parse { let _p = profile::span("parse_query").detail(|| format!("{file_id:?}")); let text = db.file_text(file_id); SourceFile::parse(&text) diff --git a/crates/base-db/src/span.rs b/crates/base-db/src/span.rs index e183e9b19965c..370c732813ac4 100644 --- a/crates/base-db/src/span.rs +++ b/crates/base-db/src/span.rs @@ -1,3 +1,5 @@ +/// File and span related types. +// FIXME: This should probably be moved into its own crate. use std::fmt; use salsa::InternId; @@ -29,10 +31,10 @@ impl SyntaxContext for SyntaxContextId { } // inherent trait impls please tyvm impl SyntaxContextId { - // FIXME: This is very much UB, salsa exposes no way to create an InternId in a const context + // TODO: This is very much UB, salsa exposes no way to create an InternId in a const context // currently (which kind of makes sense but we need it here!) 
pub const ROOT: Self = SyntaxContextId(unsafe { core::mem::transmute(1) });
-    // TODO: This is very much UB, salsa exposes no way to create an InternId in a const context
+    // TODO: This is very much UB, salsa exposes no way to create an InternId in a const context
     // currently (which kind of makes sense but we need it here!)
     pub const SELF_REF: Self = SyntaxContextId(unsafe { core::mem::transmute(!0u32) });
diff --git a/crates/cfg/src/tests.rs b/crates/cfg/src/tests.rs
index 61cdbded0b99e..c7ac1af934a08 100644
--- a/crates/cfg/src/tests.rs
+++ b/crates/cfg/src/tests.rs
@@ -1,34 +1,14 @@
 use arbitrary::{Arbitrary, Unstructured};
 use expect_test::{expect, Expect};
-use mbe::{syntax_node_to_token_tree, SpanMapper};
+use mbe::{syntax_node_to_token_tree, DummyTestSpanMap};
 use syntax::{ast, AstNode};
-use tt::{SpanAnchor, SyntaxContext};

 use crate::{CfgAtom, CfgExpr, CfgOptions, DnfExpr};

-#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
-struct DummyFile;
-impl SpanAnchor for DummyFile {
-    const DUMMY: Self = DummyFile;
-}
-#[derive(Debug, Copy, Clone, PartialEq, Eq)]
-struct DummyCtx;
-impl SyntaxContext for DummyCtx {
-    const DUMMY: Self = DummyCtx;
-}
-
-struct NoOpMap;
-
-impl SpanMapper<tt::SpanData<DummyFile, DummyCtx>> for NoOpMap {
-    fn span_for(&self, range: syntax::TextRange) -> tt::SpanData<DummyFile, DummyCtx> {
-        tt::SpanData { range, anchor: DummyFile, ctx: DummyCtx }
-    }
-}
-
 fn assert_parse_result(input: &str, expected: CfgExpr) {
     let source_file = ast::SourceFile::parse(input).ok().unwrap();
     let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
-    let tt = syntax_node_to_token_tree(tt.syntax(), NoOpMap);
+    let tt = syntax_node_to_token_tree(tt.syntax(), DummyTestSpanMap);
     let cfg = CfgExpr::parse(&tt);
     assert_eq!(cfg, expected);
 }
@@ -36,7 +16,7 @@ fn assert_parse_result(input: &str, expected: CfgExpr) {
 fn check_dnf(input: &str, expect: Expect) {
     let source_file = ast::SourceFile::parse(input).ok().unwrap();
     let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
-    let tt = syntax_node_to_token_tree(tt.syntax(), NoOpMap);
+    let tt = syntax_node_to_token_tree(tt.syntax(), DummyTestSpanMap);
     let cfg = CfgExpr::parse(&tt);
     let actual = format!("#![cfg({})]", DnfExpr::new(cfg));
     expect.assert_eq(&actual);
@@ -45,7 +25,7 @@ fn check_why_inactive(input: &str, opts: &CfgOptions, expect: Expect) {
     let source_file = ast::SourceFile::parse(input).ok().unwrap();
     let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
-    let tt = syntax_node_to_token_tree(tt.syntax(), NoOpMap);
+    let tt = syntax_node_to_token_tree(tt.syntax(), DummyTestSpanMap);
     let cfg = CfgExpr::parse(&tt);
     let dnf = DnfExpr::new(cfg);
     let why_inactive = dnf.why_inactive(opts).unwrap().to_string();
@@ -56,7 +36,7 @@ fn check_enable_hints(input: &str, opts: &CfgOptions, expected_hints: &[&str]) {
     let source_file = ast::SourceFile::parse(input).ok().unwrap();
     let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
-    let tt = syntax_node_to_token_tree(tt.syntax(), NoOpMap);
+    let tt = syntax_node_to_token_tree(tt.syntax(), DummyTestSpanMap);
     let cfg = CfgExpr::parse(&tt);
     let dnf = DnfExpr::new(cfg);
     let hints = dnf.compute_enable_hints(opts).map(|diff| diff.to_string()).collect::<Vec<_>>();
diff --git a/crates/mbe/src/benchmark.rs b/crates/mbe/src/benchmark.rs
index e7fbb918897d8..271efe1a92e9e 100644
--- a/crates/mbe/src/benchmark.rs
+++ b/crates/mbe/src/benchmark.rs
@@ -6,35 +6,13 @@ use syntax::{
     AstNode, SmolStr,
 };
 use test_utils::{bench, bench_fixture, skip_slow_tests};
-use tt::{Span, SpanAnchor, SyntaxContext};
+use tt::Span;

 use crate::{
     parser::{MetaVarKind, Op, RepeatKind, Separator},
-    syntax_node_to_token_tree, DeclarativeMacro, SpanMapper,
+    syntax_node_to_token_tree, DeclarativeMacro, DummyTestSpanData, DummyTestSpanMap,
 };

-type SpanData = tt::SpanData<DummyFile, DummyCtx>;
-
-#[derive(PartialEq, Eq, Clone, Copy, Debug, Hash)]
-struct DummyFile;
-impl SpanAnchor for DummyFile {
-    const DUMMY: Self = DummyFile;
-}
-
-#[derive(PartialEq, Eq, Clone, Copy, Debug)]
-struct DummyCtx;
-impl SyntaxContext for DummyCtx {
-    const DUMMY: Self = DummyCtx;
-}
-
-struct NoOpMap;
-
-impl SpanMapper<SpanData> for NoOpMap {
-    fn span_for(&self, range: syntax::TextRange) -> SpanData {
-        SpanData { range, anchor: DummyFile, ctx: DummyCtx }
-    }
-}
-
 #[test]
 fn benchmark_parse_macro_rules() {
     if skip_slow_tests() {
@@ -70,14 +48,14 @@ fn benchmark_expand_macro_rules() {
     assert_eq!(hash, 69413);
 }

-fn macro_rules_fixtures() -> FxHashMap<String, DeclarativeMacro<SpanData>> {
+fn macro_rules_fixtures() -> FxHashMap<String, DeclarativeMacro<DummyTestSpanData>> {
     macro_rules_fixtures_tt()
         .into_iter()
         .map(|(id, tt)| (id, DeclarativeMacro::parse_macro_rules(&tt, true)))
         .collect()
 }

-fn macro_rules_fixtures_tt() -> FxHashMap<String, tt::Subtree<SpanData>> {
+fn macro_rules_fixtures_tt() -> FxHashMap<String, tt::Subtree<DummyTestSpanData>> {
     let fixture = bench_fixture::numerous_macro_rules();
     let source_file = ast::SourceFile::parse(&fixture).ok().unwrap();

@@ -87,7 +65,8 @@ fn macro_rules_fixtures_tt() -> FxHashMap<String, tt::Subtree<SpanData>> {
         .filter_map(ast::MacroRules::cast)
         .map(|rule| {
             let id = rule.name().unwrap().to_string();
-            let def_tt = syntax_node_to_token_tree(rule.token_tree().unwrap().syntax(), NoOpMap);
+            let def_tt =
+                syntax_node_to_token_tree(rule.token_tree().unwrap().syntax(), DummyTestSpanMap);
             (id, def_tt)
         })
         .collect()
@@ -95,8 +74,8 @@ fn macro_rules_fixtures_tt() -> FxHashMap<String, tt::Subtree<SpanData>> {
 /// Generate random invocation fixtures from rules
 fn invocation_fixtures(
-    rules: &FxHashMap<String, DeclarativeMacro<SpanData>>,
-) -> Vec<(String, tt::Subtree<SpanData>)> {
+    rules: &FxHashMap<String, DeclarativeMacro<DummyTestSpanData>>,
+) -> Vec<(String, tt::Subtree<DummyTestSpanData>)> {
     let mut seed = 123456789;
     let mut res = Vec::new();

@@ -118,8 +97,8 @@ fn invocation_fixtures(
         loop {
             let mut subtree = tt::Subtree {
                 delimiter: tt::Delimiter {
-                    open: SpanData::DUMMY,
-                    close: SpanData::DUMMY,
+                    open: DummyTestSpanData::DUMMY,
+                    close: DummyTestSpanData::DUMMY,
                     kind: tt::DelimiterKind::Invisible,
                 },
                 token_trees: vec![],
@@ -141,7 +120,11 @@ fn invocation_fixtures(
     }
     return res;

-    fn collect_from_op(op: &Op<SpanData>, parent: &mut tt::Subtree<SpanData>, seed: &mut usize) {
+    fn collect_from_op(
+        op: &Op<DummyTestSpanData>,
+        parent: &mut tt::Subtree<DummyTestSpanData>,
+        seed: &mut usize,
+    ) {
         return match op {
             Op::Var { kind, .. } => match kind.as_ref() {
                 Some(MetaVarKind::Ident) => parent.token_trees.push(make_ident("foo")),
@@ -227,22 +210,35 @@ fn invocation_fixtures(
         *seed = usize::wrapping_add(usize::wrapping_mul(*seed, a), c);
         *seed
     }
-    fn make_ident(ident: &str) -> tt::TokenTree<SpanData> {
-        tt::Leaf::Ident(tt::Ident { span: SpanData::DUMMY, text: SmolStr::new(ident) }).into()
-    }
-    fn make_punct(char: char) -> tt::TokenTree<SpanData> {
-        tt::Leaf::Punct(tt::Punct { span: SpanData::DUMMY, char, spacing: tt::Spacing::Alone })
+    fn make_ident(ident: &str) -> tt::TokenTree<DummyTestSpanData> {
+        tt::Leaf::Ident(tt::Ident { span: DummyTestSpanData::DUMMY, text: SmolStr::new(ident) })
             .into()
     }
-    fn make_literal(lit: &str) -> tt::TokenTree<SpanData> {
-        tt::Leaf::Literal(tt::Literal { span: SpanData::DUMMY, text: SmolStr::new(lit) }).into()
+    fn make_punct(char: char) -> tt::TokenTree<DummyTestSpanData> {
+        tt::Leaf::Punct(tt::Punct {
+            span: DummyTestSpanData::DUMMY,
+            char,
+            spacing: tt::Spacing::Alone,
+        })
+        .into()
+    }
+    fn make_literal(lit: &str) -> tt::TokenTree<DummyTestSpanData> {
+        tt::Leaf::Literal(tt::Literal {
+            span: DummyTestSpanData::DUMMY,
+            text: SmolStr::new(lit),
+        })
+        .into()
     }
     fn make_subtree(
         kind: tt::DelimiterKind,
-        token_trees: Option<Vec<tt::TokenTree<SpanData>>>,
-    ) -> tt::TokenTree<SpanData> {
+        token_trees: Option<Vec<tt::TokenTree<DummyTestSpanData>>>,
+    ) -> tt::TokenTree<DummyTestSpanData> {
         tt::Subtree {
-            delimiter: tt::Delimiter { open: SpanData::DUMMY, close: SpanData::DUMMY, kind },
+            delimiter: tt::Delimiter {
+                open: DummyTestSpanData::DUMMY,
+                close: DummyTestSpanData::DUMMY,
+                kind,
+            },
             token_trees: token_trees.unwrap_or_default(),
         }
         .into()
diff --git a/crates/mbe/src/lib.rs b/crates/mbe/src/lib.rs
index 0b8461200e633..1b13b39f0177b 100644
--- a/crates/mbe/src/lib.rs
+++ b/crates/mbe/src/lib.rs
@@ -40,6 +40,8 @@ pub use crate::{
     token_map::TokenMap,
 };

+pub use crate::syntax_bridge::dummy_test_span_utils::*;
+
 #[derive(Debug, PartialEq, Eq, Clone)]
 pub enum ParseError {
     UnexpectedToken(Box<str>),
diff --git a/crates/mbe/src/syntax_bridge.rs b/crates/mbe/src/syntax_bridge.rs
index 36c63b365d2c0..688ccb23252ca 100644
--- a/crates/mbe/src/syntax_bridge.rs
+++ b/crates/mbe/src/syntax_bridge.rs
@@ -33,6 +33,34 @@ impl<S: Span, SM: SpanMapper<S>> SpanMapper<S> for &SM {
     }
 }

+pub(crate) mod dummy_test_span_utils {
+    use super::*;
+
+    pub type DummyTestSpanData = tt::SpanData<DummyTestSpanAnchor, DummyTestSyntaxContext>;
+
+    #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+    pub struct DummyTestSpanAnchor;
+    impl tt::SpanAnchor for DummyTestSpanAnchor {
+        const DUMMY: Self = DummyTestSpanAnchor;
+    }
+    #[derive(Debug, Copy, Clone, PartialEq, Eq)]
+    pub struct DummyTestSyntaxContext;
+    impl SyntaxContext for DummyTestSyntaxContext {
+        const DUMMY: Self = DummyTestSyntaxContext;
+    }
+
+    pub struct DummyTestSpanMap;
+
+    impl SpanMapper<tt::SpanData<DummyTestSpanAnchor, DummyTestSyntaxContext>> for DummyTestSpanMap {
+        fn span_for(
+            &self,
+            range: syntax::TextRange,
+        ) -> tt::SpanData<DummyTestSpanAnchor, DummyTestSyntaxContext> {
+            tt::SpanData { range, anchor: DummyTestSpanAnchor, ctx: DummyTestSyntaxContext }
+        }
+    }
+}
+
 /// Convert the syntax node to a `TokenTree` (what macro
 /// will consume).
/// TODO: Flesh out the doc comment more thoroughly diff --git a/crates/mbe/src/syntax_bridge/tests.rs b/crates/mbe/src/syntax_bridge/tests.rs index 2e21977f681be..bd8187a148a5e 100644 --- a/crates/mbe/src/syntax_bridge/tests.rs +++ b/crates/mbe/src/syntax_bridge/tests.rs @@ -4,38 +4,14 @@ use syntax::{ast, AstNode}; use test_utils::extract_annotations; use tt::{ buffer::{TokenBuffer, TokenTreeRef}, - Leaf, Punct, Spacing, SpanAnchor, SyntaxContext, + Leaf, Punct, Spacing, }; -use crate::SpanMapper; - -use super::syntax_node_to_token_tree; +use crate::{syntax_node_to_token_tree, DummyTestSpanData, DummyTestSpanMap}; fn check_punct_spacing(fixture: &str) { - type SpanData = tt::SpanData; - - #[derive(PartialEq, Eq, Clone, Copy, Debug, Hash)] - struct DummyFile; - impl SpanAnchor for DummyFile { - const DUMMY: Self = DummyFile; - } - - #[derive(PartialEq, Eq, Clone, Copy, Debug)] - struct DummyCtx; - impl SyntaxContext for DummyCtx { - const DUMMY: Self = DummyCtx; - } - - struct NoOpMap; - - impl SpanMapper for NoOpMap { - fn span_for(&self, range: syntax::TextRange) -> SpanData { - SpanData { range, anchor: DummyFile, ctx: DummyCtx } - } - } - let source_file = ast::SourceFile::parse(fixture).ok().unwrap(); - let subtree = syntax_node_to_token_tree(source_file.syntax(), NoOpMap); + let subtree = syntax_node_to_token_tree(source_file.syntax(), DummyTestSpanMap); let mut annotations: HashMap<_, _> = extract_annotations(fixture) .into_iter() .map(|(range, annotation)| { @@ -53,7 +29,7 @@ fn check_punct_spacing(fixture: &str) { while !cursor.eof() { while let Some(token_tree) = cursor.token_tree() { if let TokenTreeRef::Leaf( - Leaf::Punct(Punct { spacing, span: SpanData { range, .. }, .. }), + Leaf::Punct(Punct { spacing, span: DummyTestSpanData { range, .. }, .. }), _, ) = token_tree { diff --git a/crates/proc-macro-srv-cli/src/main.rs b/crates/proc-macro-srv-cli/src/main.rs index bece1951872c4..ea65c33604f8e 100644 --- a/crates/proc-macro-srv-cli/src/main.rs +++ b/crates/proc-macro-srv-cli/src/main.rs @@ -18,11 +18,13 @@ fn main() -> std::io::Result<()> { run() } +#[cfg(not(FALSE))] #[cfg(not(feature = "sysroot-abi"))] fn run() -> io::Result<()> { panic!("proc-macro-srv-cli requires the `sysroot-abi` feature to be enabled"); } +#[cfg(FALSE)] #[cfg(feature = "sysroot-abi")] fn run() -> io::Result<()> { use proc_macro_api::msg::{self, Message}; diff --git a/crates/proc-macro-srv/src/lib.rs b/crates/proc-macro-srv/src/lib.rs index 00042480e3028..bd0d1b79fa1fc 100644 --- a/crates/proc-macro-srv/src/lib.rs +++ b/crates/proc-macro-srv/src/lib.rs @@ -10,6 +10,7 @@ //! * By **copying** the whole rustc `lib_proc_macro` code, we are able to build this with `stable` //! rustc rather than `unstable`. 
(Although in general ABI compatibility is still an issue)… +#![cfg(FALSE)] // TODO #![cfg(any(feature = "sysroot-abi", rust_analyzer))] #![feature(proc_macro_internals, proc_macro_diagnostic, proc_macro_span)] #![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)] diff --git a/crates/rust-analyzer/src/cargo_target_spec.rs b/crates/rust-analyzer/src/cargo_target_spec.rs index d679bfcb014c1..728bade0d0a5f 100644 --- a/crates/rust-analyzer/src/cargo_target_spec.rs +++ b/crates/rust-analyzer/src/cargo_target_spec.rs @@ -209,26 +209,17 @@ mod tests { use super::*; use cfg::CfgExpr; - use hir_def::tt::{self, Span}; - use mbe::{syntax_node_to_token_tree, SpanMapper}; + use mbe::{syntax_node_to_token_tree, DummyTestSpanMap}; use syntax::{ ast::{self, AstNode}, SmolStr, }; - struct NoOpMap; - - impl SpanMapper for NoOpMap { - fn span_for(&self, _: syntax::TextRange) -> tt::SpanData { - tt::SpanData::DUMMY - } - } - fn check(cfg: &str, expected_features: &[&str]) { let cfg_expr = { let source_file = ast::SourceFile::parse(cfg).ok().unwrap(); let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap(); - let tt = syntax_node_to_token_tree(tt.syntax(), &NoOpMap); + let tt = syntax_node_to_token_tree(tt.syntax(), &DummyTestSpanMap); CfgExpr::parse(&tt) }; diff --git a/crates/tt/src/lib.rs b/crates/tt/src/lib.rs index 1374ae1a6fc61..7977d97797ad9 100644 --- a/crates/tt/src/lib.rs +++ b/crates/tt/src/lib.rs @@ -7,31 +7,9 @@ use std::fmt; use stdx::impl_from; -use text_size::{TextRange, TextSize}; pub use smol_str::SmolStr; - -/// Represents identity of the token. -/// -/// For hygiene purposes, we need to track which expanded tokens originated from -/// which source tokens. We do it by assigning an distinct identity to each -/// source token and making sure that identities are preserved during macro -/// expansion. 
-#[derive(Clone, Copy, PartialEq, Eq, Hash)] -pub struct TokenId(pub u32); - -impl fmt::Debug for TokenId { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - self.0.fmt(f) - } -} - -impl TokenId { - pub const UNSPECIFIED: TokenId = TokenId(!0); - pub const fn unspecified() -> TokenId { - Self::UNSPECIFIED - } -} +pub use text_size::{TextRange, TextSize}; #[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)] pub struct SpanData { From ab8f12e169b72a39a6d2e68b569c8e4c29259ffc Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Sat, 25 Nov 2023 15:37:40 +0100 Subject: [PATCH 12/80] Rename hygiene vars and fields to span_map --- crates/hir-def/src/attr.rs | 53 +++++++++++++-------------- crates/hir-def/src/expander.rs | 14 +++---- crates/hir-def/src/item_tree.rs | 8 ++-- crates/hir-def/src/item_tree/lower.rs | 13 ++++--- crates/hir-def/src/lower.rs | 18 +++++---- crates/hir-def/src/path/lower.rs | 6 +-- crates/hir-def/src/visibility.rs | 14 +++---- crates/hir-expand/src/attrs.rs | 18 ++++----- crates/hir-expand/src/eager.rs | 4 +- crates/hir-expand/src/mod_path.rs | 12 +++--- crates/hir-ty/src/display.rs | 2 +- crates/hir/src/semantics.rs | 4 +- crates/hir/src/source_analyzer.rs | 2 +- 13 files changed, 85 insertions(+), 83 deletions(-) diff --git a/crates/hir-def/src/attr.rs b/crates/hir-def/src/attr.rs index 45dd981dceece..942b28fc1450e 100644 --- a/crates/hir-def/src/attr.rs +++ b/crates/hir-def/src/attr.rs @@ -415,35 +415,32 @@ impl AttrsWithOwner { AttrDefId::StaticId(it) => attrs_from_item_tree_assoc(db, it), AttrDefId::FunctionId(it) => attrs_from_item_tree_assoc(db, it), AttrDefId::TypeAliasId(it) => attrs_from_item_tree_assoc(db, it), - AttrDefId::GenericParamId(it) => { - // FIXME: we could probably just make these relative to the params? 
- match it { - GenericParamId::ConstParamId(it) => { - let src = it.parent().child_source(db); - RawAttrs::from_attrs_owner( - db.upcast(), - src.with_value(&src.value[it.local_id()]), - db.span_map(src.file_id).as_ref(), - ) - } - GenericParamId::TypeParamId(it) => { - let src = it.parent().child_source(db); - RawAttrs::from_attrs_owner( - db.upcast(), - src.with_value(&src.value[it.local_id()]), - db.span_map(src.file_id).as_ref(), - ) - } - GenericParamId::LifetimeParamId(it) => { - let src = it.parent.child_source(db); - RawAttrs::from_attrs_owner( - db.upcast(), - src.with_value(&src.value[it.local_id]), - db.span_map(src.file_id).as_ref(), - ) - } + AttrDefId::GenericParamId(it) => match it { + GenericParamId::ConstParamId(it) => { + let src = it.parent().child_source(db); + RawAttrs::from_attrs_owner( + db.upcast(), + src.with_value(&src.value[it.local_id()]), + db.span_map(src.file_id).as_ref(), + ) } - } + GenericParamId::TypeParamId(it) => { + let src = it.parent().child_source(db); + RawAttrs::from_attrs_owner( + db.upcast(), + src.with_value(&src.value[it.local_id()]), + db.span_map(src.file_id).as_ref(), + ) + } + GenericParamId::LifetimeParamId(it) => { + let src = it.parent.child_source(db); + RawAttrs::from_attrs_owner( + db.upcast(), + src.with_value(&src.value[it.local_id]), + db.span_map(src.file_id).as_ref(), + ) + } + }, AttrDefId::ExternBlockId(it) => attrs_from_item_tree_loc(db, it), AttrDefId::ExternCrateId(it) => attrs_from_item_tree_loc(db, it), AttrDefId::UseId(it) => attrs_from_item_tree_loc(db, it), diff --git a/crates/hir-def/src/expander.rs b/crates/hir-def/src/expander.rs index d8ee61a3dca36..964f07072316a 100644 --- a/crates/hir-def/src/expander.rs +++ b/crates/hir-def/src/expander.rs @@ -18,7 +18,7 @@ use crate::{ #[derive(Debug)] pub struct Expander { cfg_options: CfgOptions, - hygiene: SpanMap, + span_map: SpanMap, krate: CrateId, pub(crate) current_file_id: HirFileId, pub(crate) module: ModuleId, @@ -41,7 +41,7 @@ impl Expander { recursion_depth: 0, recursion_limit, cfg_options: db.crate_graph()[module.krate].cfg_options.clone(), - hygiene: db.span_map(current_file_id), + span_map: db.span_map(current_file_id), krate: module.krate, } } @@ -95,7 +95,7 @@ impl Expander { } pub fn exit(&mut self, db: &dyn DefDatabase, mut mark: Mark) { - self.hygiene = db.span_map(mark.file_id); + self.span_map = db.span_map(mark.file_id); self.current_file_id = mark.file_id; if self.recursion_depth == u32::MAX { // Recursion limit has been reached somewhere in the macro expansion tree. 
Reset the @@ -110,7 +110,7 @@ impl Expander { } pub fn ctx<'a>(&self, db: &'a dyn DefDatabase) -> LowerCtx<'a> { - LowerCtx::new(db, self.hygiene.clone(), self.current_file_id) + LowerCtx::new(db, self.span_map.clone(), self.current_file_id) } pub(crate) fn to_source(&self, value: T) -> InFile { @@ -118,7 +118,7 @@ impl Expander { } pub(crate) fn parse_attrs(&self, db: &dyn DefDatabase, owner: &dyn ast::HasAttrs) -> Attrs { - Attrs::filter(db, self.krate, RawAttrs::new(db.upcast(), owner, self.hygiene.as_ref())) + Attrs::filter(db, self.krate, RawAttrs::new(db.upcast(), owner, self.span_map.as_ref())) } pub(crate) fn cfg_options(&self) -> &CfgOptions { @@ -130,7 +130,7 @@ impl Expander { } pub(crate) fn parse_path(&mut self, db: &dyn DefDatabase, path: ast::Path) -> Option { - let ctx = LowerCtx::new(db, self.hygiene.clone(), self.current_file_id); + let ctx = LowerCtx::new(db, self.span_map.clone(), self.current_file_id); Path::from_src(path, &ctx) } @@ -174,7 +174,7 @@ impl Expander { let parse = value.cast::()?; self.recursion_depth += 1; - self.hygiene = db.span_map(file_id); + self.span_map = db.span_map(file_id); let old_file_id = std::mem::replace(&mut self.current_file_id, file_id); let mark = Mark { file_id: old_file_id, diff --git a/crates/hir-def/src/item_tree.rs b/crates/hir-def/src/item_tree.rs index 9c61deb423cb9..97b62941842d0 100644 --- a/crates/hir-def/src/item_tree.rs +++ b/crates/hir-def/src/item_tree.rs @@ -776,8 +776,8 @@ impl Use { // Note: The AST unwraps are fine, since if they fail we should have never obtained `index`. let ast = InFile::new(file_id, self.ast_id).to_node(db.upcast()); let ast_use_tree = ast.use_tree().expect("missing `use_tree`"); - let hygiene = db.span_map(file_id); - let (_, source_map) = lower::lower_use_tree(db, hygiene.as_ref(), ast_use_tree) + let span_map = db.span_map(file_id); + let (_, source_map) = lower::lower_use_tree(db, span_map.as_ref(), ast_use_tree) .expect("failed to lower use tree"); source_map[index].clone() } @@ -791,8 +791,8 @@ impl Use { // Note: The AST unwraps are fine, since if they fail we should have never obtained `index`. let ast = InFile::new(file_id, self.ast_id).to_node(db.upcast()); let ast_use_tree = ast.use_tree().expect("missing `use_tree`"); - let hygiene = db.span_map(file_id); - lower::lower_use_tree(db, hygiene.as_ref(), ast_use_tree) + let span_map = db.span_map(file_id); + lower::lower_use_tree(db, span_map.as_ref(), ast_use_tree) .expect("failed to lower use tree") .1 } diff --git a/crates/hir-def/src/item_tree/lower.rs b/crates/hir-def/src/item_tree/lower.rs index 933a59be15347..92a2a94623272 100644 --- a/crates/hir-def/src/item_tree/lower.rs +++ b/crates/hir-def/src/item_tree/lower.rs @@ -657,7 +657,8 @@ impl<'a> Ctx<'a> { } fn lower_visibility(&mut self, item: &dyn ast::HasVisibility) -> RawVisibilityId { - let vis = RawVisibility::from_ast_with_hygiene(self.db, item.visibility(), self.span_map()); + let vis = + RawVisibility::from_ast_with_span_map(self.db, item.visibility(), self.span_map()); self.data().vis.alloc(vis) } @@ -735,7 +736,7 @@ fn lower_abi(abi: ast::Abi) -> Interned { struct UseTreeLowering<'a> { db: &'a dyn DefDatabase, - hygiene: SpanMapRef<'a>, + span_map: SpanMapRef<'a>, mapping: Arena, } @@ -748,7 +749,7 @@ impl UseTreeLowering<'_> { // E.g. 
`use something::{inner}` (prefix is `None`, path is `something`) // or `use something::{path::{inner::{innerer}}}` (prefix is `something::path`, path is `inner`) Some(path) => { - match ModPath::from_src(self.db.upcast(), path, self.hygiene) { + match ModPath::from_src(self.db.upcast(), path, self.span_map) { Some(it) => Some(it), None => return None, // FIXME: report errors somewhere } @@ -767,7 +768,7 @@ impl UseTreeLowering<'_> { } else { let is_glob = tree.star_token().is_some(); let path = match tree.path() { - Some(path) => Some(ModPath::from_src(self.db.upcast(), path, self.hygiene)?), + Some(path) => Some(ModPath::from_src(self.db.upcast(), path, self.span_map)?), None => None, }; let alias = tree.rename().map(|a| { @@ -803,10 +804,10 @@ impl UseTreeLowering<'_> { pub(crate) fn lower_use_tree( db: &dyn DefDatabase, - hygiene: SpanMapRef<'_>, + span_map: SpanMapRef<'_>, tree: ast::UseTree, ) -> Option<(UseTree, Arena)> { - let mut lowering = UseTreeLowering { db, hygiene, mapping: Arena::new() }; + let mut lowering = UseTreeLowering { db, span_map, mapping: Arena::new() }; let tree = lowering.lower_use_tree(tree)?; Some((tree, lowering.mapping)) } diff --git a/crates/hir-def/src/lower.rs b/crates/hir-def/src/lower.rs index a5c22898245d3..6e7fb0845407c 100644 --- a/crates/hir-def/src/lower.rs +++ b/crates/hir-def/src/lower.rs @@ -13,26 +13,30 @@ use crate::{db::DefDatabase, path::Path}; pub struct LowerCtx<'a> { pub db: &'a dyn DefDatabase, - hygiene: SpanMap, + span_map: SpanMap, // FIXME: This optimization is probably pointless, ast id map should pretty much always exist anyways. ast_id_map: Option<(HirFileId, OnceCell>)>, } impl<'a> LowerCtx<'a> { - pub fn new(db: &'a dyn DefDatabase, hygiene: SpanMap, file_id: HirFileId) -> Self { - LowerCtx { db, hygiene, ast_id_map: Some((file_id, OnceCell::new())) } + pub fn new(db: &'a dyn DefDatabase, span_map: SpanMap, file_id: HirFileId) -> Self { + LowerCtx { db, span_map, ast_id_map: Some((file_id, OnceCell::new())) } } pub fn with_file_id(db: &'a dyn DefDatabase, file_id: HirFileId) -> Self { - LowerCtx { db, hygiene: db.span_map(file_id), ast_id_map: Some((file_id, OnceCell::new())) } + LowerCtx { + db, + span_map: db.span_map(file_id), + ast_id_map: Some((file_id, OnceCell::new())), + } } - pub fn with_hygiene(db: &'a dyn DefDatabase, hygiene: SpanMap) -> Self { - LowerCtx { db, hygiene, ast_id_map: None } + pub fn with_span_map(db: &'a dyn DefDatabase, span_map: SpanMap) -> Self { + LowerCtx { db, span_map, ast_id_map: None } } pub(crate) fn span_map(&self) -> SpanMapRef<'_> { - self.hygiene.as_ref() + self.span_map.as_ref() } pub(crate) fn lower_path(&self, ast: ast::Path) -> Option { diff --git a/crates/hir-def/src/path/lower.rs b/crates/hir-def/src/path/lower.rs index 9b499c5619efc..9ab4bdaa4ac81 100644 --- a/crates/hir-def/src/path/lower.rs +++ b/crates/hir-def/src/path/lower.rs @@ -26,7 +26,7 @@ pub(super) fn lower_path(mut path: ast::Path, ctx: &LowerCtx<'_>) -> Option) -> Option) -> Option>, ) -> RawVisibility { - Self::from_ast_with_hygiene(db, node.value, db.span_map(node.file_id).as_ref()) + Self::from_ast_with_span_map(db, node.value, db.span_map(node.file_id).as_ref()) } - pub(crate) fn from_ast_with_hygiene( + pub(crate) fn from_ast_with_span_map( db: &dyn DefDatabase, node: Option, - hygiene: SpanMapRef<'_>, + span_map: SpanMapRef<'_>, ) -> RawVisibility { - Self::from_ast_with_hygiene_and_default(db, node, RawVisibility::private(), hygiene) + Self::from_ast_with_span_map_and_default(db, node, RawVisibility::private(), 
span_map) } - pub(crate) fn from_ast_with_hygiene_and_default( + pub(crate) fn from_ast_with_span_map_and_default( db: &dyn DefDatabase, node: Option, default: RawVisibility, - hygiene: SpanMapRef<'_>, + span_map: SpanMapRef<'_>, ) -> RawVisibility { let node = match node { None => return default, @@ -57,7 +57,7 @@ impl RawVisibility { }; match node.kind() { ast::VisibilityKind::In(path) => { - let path = ModPath::from_src(db.upcast(), path, hygiene); + let path = ModPath::from_src(db.upcast(), path, span_map); let path = match path { None => return RawVisibility::private(), Some(path) => path, diff --git a/crates/hir-expand/src/attrs.rs b/crates/hir-expand/src/attrs.rs index c4937ae08b1e1..1fdb5dc37b88b 100644 --- a/crates/hir-expand/src/attrs.rs +++ b/crates/hir-expand/src/attrs.rs @@ -42,18 +42,18 @@ impl RawAttrs { pub fn new( db: &dyn ExpandDatabase, owner: &dyn ast::HasAttrs, - hygiene: SpanMapRef<'_>, + span_map: SpanMapRef<'_>, ) -> Self { let entries = collect_attrs(owner) .filter_map(|(id, attr)| match attr { Either::Left(attr) => { - attr.meta().and_then(|meta| Attr::from_src(db, meta, hygiene, id)) + attr.meta().and_then(|meta| Attr::from_src(db, meta, span_map, id)) } Either::Right(comment) => comment.doc_comment().map(|doc| Attr { id, input: Some(Interned::new(AttrInput::Literal(SmolStr::new(doc)))), path: Interned::new(ModPath::from(crate::name!(doc))), - ctxt: hygiene.span_for_range(comment.syntax().text_range()).ctx, + ctxt: span_map.span_for_range(comment.syntax().text_range()).ctx, }), }) .collect::>(); @@ -66,9 +66,9 @@ impl RawAttrs { pub fn from_attrs_owner( db: &dyn ExpandDatabase, owner: InFile<&dyn ast::HasAttrs>, - hygiene: SpanMapRef<'_>, + span_map: SpanMapRef<'_>, ) -> Self { - Self::new(db, owner.value, hygiene) + Self::new(db, owner.value, span_map) } pub fn merge(&self, other: Self) -> Self { @@ -216,10 +216,10 @@ impl Attr { fn from_src( db: &dyn ExpandDatabase, ast: ast::Meta, - hygiene: SpanMapRef<'_>, + span_map: SpanMapRef<'_>, id: AttrId, ) -> Option { - let path = Interned::new(ModPath::from_src(db, ast.path()?, hygiene)?); + let path = Interned::new(ModPath::from_src(db, ast.path()?, span_map)?); let input = if let Some(ast::Expr::Literal(lit)) = ast.expr() { let value = match lit.kind() { ast::LiteralKind::String(string) => string.value()?.into(), @@ -227,12 +227,12 @@ impl Attr { }; Some(Interned::new(AttrInput::Literal(value))) } else if let Some(tt) = ast.token_tree() { - let tree = syntax_node_to_token_tree(tt.syntax(), hygiene); + let tree = syntax_node_to_token_tree(tt.syntax(), span_map); Some(Interned::new(AttrInput::TokenTree(Box::new(tree)))) } else { None }; - Some(Attr { id, path, input, ctxt: hygiene.span_for_range(ast.syntax().text_range()).ctx }) + Some(Attr { id, path, input, ctxt: span_map.span_for_range(ast.syntax().text_range()).ctx }) } fn from_tt(db: &dyn ExpandDatabase, tt: &tt::Subtree, id: AttrId) -> Option { diff --git a/crates/hir-expand/src/eager.rs b/crates/hir-expand/src/eager.rs index bcb5383c3f9b6..4499c2e69d655 100644 --- a/crates/hir-expand/src/eager.rs +++ b/crates/hir-expand/src/eager.rs @@ -127,7 +127,7 @@ fn lazy_expand( fn eager_macro_recur( db: &dyn ExpandDatabase, - hygiene: &ExpansionSpanMap, + span_map: &ExpansionSpanMap, curr: InFile, krate: CrateId, call_site: SyntaxContextId, @@ -170,7 +170,7 @@ fn eager_macro_recur( }; let def = match call .path() - .and_then(|path| ModPath::from_src(db, path, SpanMapRef::ExpansionSpanMap(hygiene))) + .and_then(|path| ModPath::from_src(db, path, 
SpanMapRef::ExpansionSpanMap(span_map))) { Some(path) => match macro_resolver(path.clone()) { Some(def) => def, diff --git a/crates/hir-expand/src/mod_path.rs b/crates/hir-expand/src/mod_path.rs index a518f7eb6a9f1..9534b5039f682 100644 --- a/crates/hir-expand/src/mod_path.rs +++ b/crates/hir-expand/src/mod_path.rs @@ -47,9 +47,9 @@ impl ModPath { pub fn from_src( db: &dyn ExpandDatabase, path: ast::Path, - hygiene: SpanMapRef<'_>, + span_map: SpanMapRef<'_>, ) -> Option { - convert_path(db, None, path, hygiene) + convert_path(db, None, path, span_map) } pub fn from_segments(kind: PathKind, segments: impl IntoIterator) -> ModPath { @@ -194,10 +194,10 @@ fn convert_path( db: &dyn ExpandDatabase, prefix: Option, path: ast::Path, - hygiene: SpanMapRef<'_>, + span_map: SpanMapRef<'_>, ) -> Option { let prefix = match path.qualifier() { - Some(qual) => Some(convert_path(db, prefix, qual, hygiene)?), + Some(qual) => Some(convert_path(db, prefix, qual, span_map)?), None => prefix, }; @@ -211,7 +211,7 @@ fn convert_path( ModPath::from_kind( resolve_crate_root( db, - hygiene.span_for_range(name_ref.syntax().text_range()).ctx, + span_map.span_for_range(name_ref.syntax().text_range()).ctx, ) .map(PathKind::DollarCrate) .unwrap_or(PathKind::Crate), @@ -265,7 +265,7 @@ fn convert_path( // We follow what it did anyway :) if mod_path.segments.len() == 1 && mod_path.kind == PathKind::Plain { if let Some(_macro_call) = path.syntax().parent().and_then(ast::MacroCall::cast) { - let syn_ctx = hygiene.span_for_range(segment.syntax().text_range()).ctx; + let syn_ctx = span_map.span_for_range(segment.syntax().text_range()).ctx; if let Some(macro_call_id) = db.lookup_intern_syntax_context(syn_ctx).outer_expn { if db.lookup_intern_macro_call(macro_call_id).def.local_inner { mod_path.kind = match resolve_crate_root(db, syn_ctx) { diff --git a/crates/hir-ty/src/display.rs b/crates/hir-ty/src/display.rs index 0712fc1c50c7f..3d426caf680e9 100644 --- a/crates/hir-ty/src/display.rs +++ b/crates/hir-ty/src/display.rs @@ -1732,7 +1732,7 @@ impl HirDisplay for TypeRef { f.write_joined(bounds, " + ")?; } TypeRef::Macro(macro_call) => { - let ctx = hir_def::lower::LowerCtx::with_hygiene( + let ctx = hir_def::lower::LowerCtx::with_span_map( f.db.upcast(), f.db.span_map(macro_call.file_id), ); diff --git a/crates/hir/src/semantics.rs b/crates/hir/src/semantics.rs index e124e14a5422b..666ffd390ae73 100644 --- a/crates/hir/src/semantics.rs +++ b/crates/hir/src/semantics.rs @@ -866,8 +866,8 @@ impl<'db> SemanticsImpl<'db> { pub fn resolve_trait(&self, path: &ast::Path) -> Option { let analyze = self.analyze(path.syntax())?; - let hygiene = self.db.span_map(analyze.file_id); - let ctx = LowerCtx::with_hygiene(self.db.upcast(), hygiene); + let span_map = self.db.span_map(analyze.file_id); + let ctx = LowerCtx::with_span_map(self.db.upcast(), span_map); let hir_path = Path::from_src(path.clone(), &ctx)?; match analyze.resolver.resolve_path_in_type_ns_fully(self.db.upcast(), &hir_path)? { TypeNs::TraitId(id) => Some(Trait { id }), diff --git a/crates/hir/src/source_analyzer.rs b/crates/hir/src/source_analyzer.rs index aaad82e128763..2339187122eea 100644 --- a/crates/hir/src/source_analyzer.rs +++ b/crates/hir/src/source_analyzer.rs @@ -595,7 +595,7 @@ impl SourceAnalyzer { } // This must be a normal source file rather than macro file. 
- let ctx = LowerCtx::with_hygiene(db.upcast(), db.span_map(self.file_id)); + let ctx = LowerCtx::with_span_map(db.upcast(), db.span_map(self.file_id)); let hir_path = Path::from_src(path.clone(), &ctx)?; // Case where path is a qualifier of a use tree, e.g. foo::bar::{Baz, Qux} where we are From 92d447f9766e07747815a9fc01d25f95df0be581 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Sat, 25 Nov 2023 17:10:18 +0100 Subject: [PATCH 13/80] =?UTF-8?q?=F0=9F=A7=B9?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- crates/hir-def/src/attr/tests.rs | 2 +- crates/hir-def/src/expander.rs | 2 +- crates/hir-def/src/lower.rs | 2 +- .../hir-def/src/nameres/tests/incremental.rs | 1 + crates/hir-def/src/path.rs | 4 +- crates/hir-def/src/path/lower.rs | 18 ++-- crates/hir-expand/src/ast_id_map.rs | 4 +- crates/hir-expand/src/attrs.rs | 8 +- crates/hir-expand/src/builtin_fn_macro.rs | 2 +- crates/hir-expand/src/db.rs | 89 +++++++------------ crates/hir-expand/src/eager.rs | 8 +- crates/hir-expand/src/hygiene.rs | 2 +- crates/hir-expand/src/lib.rs | 5 +- crates/hir-expand/src/span.rs | 3 +- crates/hir-ty/src/display.rs | 2 +- crates/hir/src/attrs.rs | 2 +- crates/hir/src/db.rs | 23 ++++- crates/hir/src/lib.rs | 13 +-- crates/hir/src/semantics.rs | 4 +- crates/hir/src/source_analyzer.rs | 4 +- crates/ide-completion/src/completions.rs | 2 +- crates/ide-db/src/apply_change.rs | 15 +++- crates/ide/src/hover.rs | 1 - crates/mbe/src/syntax_bridge.rs | 1 - crates/mbe/src/token_map.rs | 6 -- crates/proc-macro-api/src/msg.rs | 2 +- 26 files changed, 102 insertions(+), 123 deletions(-) diff --git a/crates/hir-def/src/attr/tests.rs b/crates/hir-def/src/attr/tests.rs index 0f21dc98299f4..796f165c7c876 100644 --- a/crates/hir-def/src/attr/tests.rs +++ b/crates/hir-def/src/attr/tests.rs @@ -13,7 +13,7 @@ fn assert_parse_result(input: &str, expected: DocExpr) { let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap(); let tt = syntax_node_to_token_tree( tt.syntax(), - SpanMapRef::RealSpanMap(&RealSpanMap::empty(FileId(0))), + SpanMapRef::RealSpanMap(&RealSpanMap::absolute(FileId(0))), ); let cfg = DocExpr::parse(&tt); assert_eq!(cfg, expected); diff --git a/crates/hir-def/src/expander.rs b/crates/hir-def/src/expander.rs index 964f07072316a..e36aa19b8ddc3 100644 --- a/crates/hir-def/src/expander.rs +++ b/crates/hir-def/src/expander.rs @@ -131,7 +131,7 @@ impl Expander { pub(crate) fn parse_path(&mut self, db: &dyn DefDatabase, path: ast::Path) -> Option { let ctx = LowerCtx::new(db, self.span_map.clone(), self.current_file_id); - Path::from_src(path, &ctx) + Path::from_src(&ctx, path) } fn within_limit( diff --git a/crates/hir-def/src/lower.rs b/crates/hir-def/src/lower.rs index 6e7fb0845407c..a3505b65fe722 100644 --- a/crates/hir-def/src/lower.rs +++ b/crates/hir-def/src/lower.rs @@ -40,7 +40,7 @@ impl<'a> LowerCtx<'a> { } pub(crate) fn lower_path(&self, ast: ast::Path) -> Option { - Path::from_src(ast, self) + Path::from_src(self, ast) } pub(crate) fn ast_id(&self, item: &N) -> Option> { diff --git a/crates/hir-def/src/nameres/tests/incremental.rs b/crates/hir-def/src/nameres/tests/incremental.rs index 3d7784bdd5428..78cb78e833ec5 100644 --- a/crates/hir-def/src/nameres/tests/incremental.rs +++ b/crates/hir-def/src/nameres/tests/incremental.rs @@ -11,6 +11,7 @@ fn check_def_map_is_not_recomputed(ra_fixture_initial: &str, ra_fixture_change: let (mut db, pos) = TestDB::with_position(ra_fixture_initial); let krate = { let crate_graph = 
db.crate_graph(); + // Some of these tests use minicore/proc-macros which will be injected as the first crate crate_graph.iter().last().unwrap() }; { diff --git a/crates/hir-def/src/path.rs b/crates/hir-def/src/path.rs index 3894172a5ad86..215c49d4c2ce4 100644 --- a/crates/hir-def/src/path.rs +++ b/crates/hir-def/src/path.rs @@ -96,8 +96,8 @@ pub enum GenericArg { impl Path { /// Converts an `ast::Path` to `Path`. Works with use trees. /// It correctly handles `$crate` based path from macro call. - pub fn from_src(path: ast::Path, ctx: &LowerCtx<'_>) -> Option { - lower::lower_path(path, ctx) + pub fn from_src(ctx: &LowerCtx<'_>, path: ast::Path) -> Option { + lower::lower_path(ctx, path) } /// Converts a known mod path to `Path`. diff --git a/crates/hir-def/src/path/lower.rs b/crates/hir-def/src/path/lower.rs index 9ab4bdaa4ac81..39f1b6f1c06db 100644 --- a/crates/hir-def/src/path/lower.rs +++ b/crates/hir-def/src/path/lower.rs @@ -16,12 +16,9 @@ use crate::{ type_ref::{LifetimeRef, TypeBound, TypeRef}, }; -// fn resolve_crate_root - /// Converts an `ast::Path` to `Path`. Works with use trees. /// It correctly handles `$crate` based path from macro call. -// FIXME: flip the params -pub(super) fn lower_path(mut path: ast::Path, ctx: &LowerCtx<'_>) -> Option { +pub(super) fn lower_path(ctx: &LowerCtx<'_>, mut path: ast::Path) -> Option { let mut kind = PathKind::Plain; let mut type_anchor = None; let mut segments = Vec::new(); @@ -36,18 +33,15 @@ pub(super) fn lower_path(mut path: ast::Path, ctx: &LowerCtx<'_>) -> Option { - let name = if name_ref.text() == "$crate" { - kind = resolve_crate_root( + if name_ref.text() == "$crate" { + break kind = resolve_crate_root( ctx.db.upcast(), span_map.span_for_range(name_ref.syntax().text_range()).ctx, ) .map(PathKind::DollarCrate) .unwrap_or(PathKind::Crate); - - break; - } else { - name_ref.as_name() - }; + } + let name = name_ref.as_name(); let args = segment .generic_arg_list() .and_then(|it| lower_generic_args(ctx, it)) @@ -82,7 +76,7 @@ pub(super) fn lower_path(mut path: ast::Path, ctx: &LowerCtx<'_>) -> Option>::Foo desugars to Trait::Foo Some(trait_ref) => { let Path::Normal { mod_path, generic_args: path_generic_args, .. } = - Path::from_src(trait_ref.path()?, ctx)? + Path::from_src(ctx, trait_ref.path()?)? else { return None; }; diff --git a/crates/hir-expand/src/ast_id_map.rs b/crates/hir-expand/src/ast_id_map.rs index 2d24496ab7013..be0b72f9dfa43 100644 --- a/crates/hir-expand/src/ast_id_map.rs +++ b/crates/hir-expand/src/ast_id_map.rs @@ -17,10 +17,10 @@ use profile::Count; use rustc_hash::FxHasher; use syntax::{ast, AstNode, AstPtr, SyntaxNode, SyntaxNodePtr}; -pub use base_db::span::ErasedFileAstId; - use crate::db; +pub use base_db::span::ErasedFileAstId; + /// `AstId` points to an AST node in any file. /// /// It is stable across reparses, and can be used as salsa key/value. diff --git a/crates/hir-expand/src/attrs.rs b/crates/hir-expand/src/attrs.rs index 1fdb5dc37b88b..f1619db2420a9 100644 --- a/crates/hir-expand/src/attrs.rs +++ b/crates/hir-expand/src/attrs.rs @@ -293,13 +293,8 @@ impl Attr { if tts.is_empty() { return None; } - // FIXME: Absolutely wrong - let call_site = match tts.first().unwrap() { - tt::TokenTree::Leaf(l) => l.span().ctx, - tt::TokenTree::Subtree(s) => s.delimiter.open.ctx, - }; // FIXME: This is necessarily a hack. It'd be nice if we could avoid allocation - // here. 
+ // here or maybe just parse a mod path from a token tree directly let subtree = tt::Subtree { delimiter: tt::Delimiter::unspecified(), token_trees: tts.to_vec(), @@ -313,6 +308,7 @@ impl Attr { return None; } let path = meta.path()?; + let call_site = span_map.span_for_range(path.syntax().text_range()).ctx; Some(( ModPath::from_src(db, path, SpanMapRef::ExpansionSpanMap(&span_map))?, call_site, diff --git a/crates/hir-expand/src/builtin_fn_macro.rs b/crates/hir-expand/src/builtin_fn_macro.rs index 9fb04a2f1b081..459d1c868d808 100644 --- a/crates/hir-expand/src/builtin_fn_macro.rs +++ b/crates/hir-expand/src/builtin_fn_macro.rs @@ -553,7 +553,7 @@ pub(crate) fn include_arg_to_tt( let Some(EagerCallInfo { arg, arg_id, .. }) = loc.eager.as_deref() else { panic!("include_arg_to_tt called on non include macro call: {:?}", &loc.eager); }; - let path = parse_string(&arg.0)?; + let path = parse_string(&arg)?; let file_id = relative_file(db, *arg_id, &path, false)?; // why are we not going through a SyntaxNode here? diff --git a/crates/hir-expand/src/db.rs b/crates/hir-expand/src/db.rs index e31034884c5ed..fed1705fb7d3e 100644 --- a/crates/hir-expand/src/db.rs +++ b/crates/hir-expand/src/db.rs @@ -190,15 +190,16 @@ pub fn expand_speculative( speculative_args: &SyntaxNode, token_to_map: SyntaxToken, ) -> Option<(SyntaxNode, SyntaxToken)> { - // FIXME spanmaps let loc = db.lookup_intern_macro_call(actual_macro_call); // Build the subtree and token mapping for the speculative args let _censor = censor_for_macro_input(&loc, speculative_args); + let span_map = RealSpanMap::absolute(SpanAnchor::DUMMY.file_id); + let span_map = SpanMapRef::RealSpanMap(&span_map); let mut tt = mbe::syntax_node_to_token_tree( speculative_args, // we don't leak these spans into any query so its fine to make them absolute - SpanMapRef::RealSpanMap(&RealSpanMap::empty(SpanAnchor::DUMMY.file_id)), + span_map, ); let attr_arg = match loc.kind { @@ -216,10 +217,7 @@ pub fn expand_speculative( }?; match attr.token_tree() { Some(token_tree) => { - let mut tree = syntax_node_to_token_tree( - token_tree.syntax(), - SpanMapRef::RealSpanMap(&RealSpanMap::empty(SpanAnchor::DUMMY.file_id)), - ); + let mut tree = syntax_node_to_token_tree(token_tree.syntax(), span_map); tree.delimiter = tt::Delimiter::UNSPECIFIED; Some(tree) @@ -243,12 +241,7 @@ pub fn expand_speculative( MacroDefKind::BuiltInDerive(expander, ..) => { // this cast is a bit sus, can we avoid losing the typedness here? 
let adt = ast::Adt::cast(speculative_args.clone()).unwrap(); - expander.expand( - db, - actual_macro_call, - &adt, - SpanMapRef::RealSpanMap(&RealSpanMap::empty(SpanAnchor::DUMMY.file_id)), - ) + expander.expand(db, actual_macro_call, &adt, span_map) } MacroDefKind::Declarative(it) => { db.decl_macro_expander(loc.krate, it).expand_unhygienic(tt) @@ -305,6 +298,8 @@ fn parse_or_expand_with_err( } } +// FIXME: We should verify that the parsed node is one of the many macro node variants we expect +// instead of having it be untyped fn parse_macro_expansion( db: &dyn ExpandDatabase, macro_file: MacroFileId, @@ -330,6 +325,18 @@ fn parse_macro_expansion_error( .map(|it| it.0.errors().to_vec().into_boxed_slice()) } +fn parse_with_map(db: &dyn ExpandDatabase, file_id: HirFileId) -> (Parse, SpanMap) { + match file_id.repr() { + HirFileIdRepr::FileId(file_id) => { + (db.parse(file_id).to_syntax(), SpanMap::RealSpanMap(db.real_span_map(file_id))) + } + HirFileIdRepr::MacroFile(macro_file) => { + let (parse, map) = db.parse_macro_expansion(macro_file).value; + (parse, SpanMap::ExpansionSpanMap(map)) + } + } +} + fn macro_arg( db: &dyn ExpandDatabase, id: MacroCallId, @@ -361,32 +368,22 @@ fn macro_arg( .then(|| loc.eager.as_deref()) .flatten() { - ValueResult::ok(Some(Arc::new(arg.0.clone()))) + ValueResult::ok(Some(arg.clone())) } else { - //FIXME: clean this up, the ast id map lookup is done twice here - let (parse, map) = match loc.kind.file_id().repr() { - HirFileIdRepr::FileId(file_id) => { - let syntax = db.parse(file_id).to_syntax(); - - (syntax, SpanMap::RealSpanMap(db.real_span_map(file_id))) - } - HirFileIdRepr::MacroFile(macro_file) => { - let (parse, map) = db.parse_macro_expansion(macro_file).value; - (parse, SpanMap::ExpansionSpanMap(map)) - } - }; + let (parse, map) = parse_with_map(db, loc.kind.file_id()); let root = parse.syntax_node(); let syntax = match loc.kind { MacroCallKind::FnLike { ast_id, .. } => { let node = &ast_id.to_ptr(db).to_node(&root); let offset = node.syntax().text_range().start(); - match node.token_tree().map(|it| it.syntax().clone()) { + match node.token_tree() { Some(tt) => { - if let Some(e) = mismatched_delimiters(&tt) { + let tt = tt.syntax(); + if let Some(e) = mismatched_delimiters(tt) { return ValueResult::only_err(e); } - tt + tt.clone() } None => { return ValueResult::only_err(Arc::new(Box::new([ @@ -479,17 +476,8 @@ fn decl_macro_expander( id: AstId, ) -> Arc { let is_2021 = db.crate_graph()[def_crate].edition >= Edition::Edition2021; - let (root, map) = match id.file_id.repr() { - HirFileIdRepr::FileId(file_id) => { - // FIXME: Arc - // FIXME: id.to_ptr duplicated, expensive - (db.parse(file_id).syntax_node(), SpanMap::RealSpanMap(db.real_span_map(file_id))) - } - HirFileIdRepr::MacroFile(macro_file) => { - let (parse, map) = db.parse_macro_expansion(macro_file).value; - (parse.syntax_node(), SpanMap::ExpansionSpanMap(map)) - } - }; + let (root, map) = parse_with_map(db, id.file_id); + let root = root.syntax_node(); let transparency = |node| { // ... would be nice to have the item tree here @@ -568,21 +556,8 @@ fn macro_expand( let ExpandResult { value: tt, mut err } = match loc.def.kind { MacroDefKind::ProcMacro(..) => return db.expand_proc_macro(macro_call_id), MacroDefKind::BuiltInDerive(expander, ..) => { - // FIXME: add firewall query for this? 
- let hir_file_id = loc.kind.file_id(); - let (root, map) = match hir_file_id.repr() { - HirFileIdRepr::FileId(file_id) => { - // FIXME: query for span map - ( - db.parse(file_id).syntax_node(), - SpanMap::RealSpanMap(db.real_span_map(file_id)), - ) - } - HirFileIdRepr::MacroFile(macro_file) => { - let (parse, map) = db.parse_macro_expansion(macro_file).value; - (parse.syntax_node(), SpanMap::ExpansionSpanMap(map)) - } - }; + let (root, map) = parse_with_map(db, loc.kind.file_id()); + let root = root.syntax_node(); let MacroCallKind::Derive { ast_id, .. } = loc.kind else { unreachable!() }; let node = ast_id.to_ptr(db).to_node(&root); @@ -710,9 +685,9 @@ fn token_tree_to_syntax_node( ExpandTo::Type => mbe::TopEntryPoint::Type, ExpandTo::Expr => mbe::TopEntryPoint::Expr, }; - let mut tm = mbe::token_tree_to_syntax_node(tt, entry_point); + let (parse, mut span_map) = mbe::token_tree_to_syntax_node(tt, entry_point); // FIXME: now what the hell is going on here - tm.1.span_map.sort_by(|(_, a), (_, b)| { + span_map.span_map.sort_by(|(_, a), (_, b)| { a.anchor.file_id.cmp(&b.anchor.file_id).then_with(|| { let map = db.ast_id_map(a.anchor.file_id.into()); map.get_erased(a.anchor.ast_id) @@ -721,7 +696,7 @@ fn token_tree_to_syntax_node( .cmp(&map.get_erased(b.anchor.ast_id).text_range().start()) }) }); - tm + (parse, span_map) } fn check_tt_count(tt: &tt::Subtree) -> Result<(), ExpandResult>> { diff --git a/crates/hir-expand/src/eager.rs b/crates/hir-expand/src/eager.rs index 4499c2e69d655..deea59b93b621 100644 --- a/crates/hir-expand/src/eager.rs +++ b/crates/hir-expand/src/eager.rs @@ -81,7 +81,7 @@ pub fn expand_eager_macro_input( // FIXME: Spans! let mut subtree = mbe::syntax_node_to_token_tree( &expanded_eager_input, - RealSpanMap::empty(::DUMMY.file_id), + RealSpanMap::absolute(::DUMMY.file_id), ); subtree.delimiter = crate::tt::Delimiter::UNSPECIFIED; @@ -89,11 +89,7 @@ pub fn expand_eager_macro_input( let loc = MacroCallLoc { def, krate, - eager: Some(Box::new(EagerCallInfo { - arg: Arc::new((subtree,)), - arg_id, - error: err.clone(), - })), + eager: Some(Box::new(EagerCallInfo { arg: Arc::new(subtree), arg_id, error: err.clone() })), kind: MacroCallKind::FnLike { ast_id: call_id, expand_to }, call_site, }; diff --git a/crates/hir-expand/src/hygiene.rs b/crates/hir-expand/src/hygiene.rs index 66d9f679d8941..8fdfa8af0cac0 100644 --- a/crates/hir-expand/src/hygiene.rs +++ b/crates/hir-expand/src/hygiene.rs @@ -8,7 +8,7 @@ use base_db::span::{MacroCallId, SpanData, SyntaxContextId}; use crate::db::ExpandDatabase; -#[derive(Debug, Clone, Hash, PartialEq, Eq)] +#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] pub struct SyntaxContextData { pub outer_expn: Option, pub outer_transparency: Transparency, diff --git a/crates/hir-expand/src/lib.rs b/crates/hir-expand/src/lib.rs index 13eb90b18a8e0..4e5aa90312610 100644 --- a/crates/hir-expand/src/lib.rs +++ b/crates/hir-expand/src/lib.rs @@ -135,7 +135,7 @@ pub enum MacroDefKind { #[derive(Debug, Clone, PartialEq, Eq, Hash)] struct EagerCallInfo { /// The expanded argument of the eager macro. - arg: Arc<(tt::Subtree,)>, + arg: Arc, /// Call id of the eager macro's input file (this is the macro file for its fully expanded input). arg_id: MacroCallId, error: Option, @@ -537,8 +537,6 @@ impl MacroCallKind { FileRange { range, file_id } } - // FIXME: -> InFile it should be impossible for the token tree to be missing at - // this point! fn arg(&self, db: &dyn db::ExpandDatabase) -> InFile> { match self { MacroCallKind::FnLike { ast_id, .. 
} => { @@ -561,7 +559,6 @@ impl MacroCallKind { pub struct ExpansionInfo { pub expanded: InMacroFile, /// The argument TokenTree or item for attributes - // FIXME: Can this ever be `None`? arg: InFile>, /// The `macro_rules!` or attribute input. attr_input_or_mac_def: Option>, diff --git a/crates/hir-expand/src/span.rs b/crates/hir-expand/src/span.rs index c16d761c17195..589f415de566e 100644 --- a/crates/hir-expand/src/span.rs +++ b/crates/hir-expand/src/span.rs @@ -77,7 +77,8 @@ pub struct RealSpanMap { } impl RealSpanMap { - pub fn empty(file_id: FileId) -> Self { + /// Creates a real file span map that returns absolute ranges (relative ranges to the root ast id). + pub fn absolute(file_id: FileId) -> Self { RealSpanMap { file_id, pairs: Box::from([(TextSize::new(0), ROOT_ERASED_FILE_AST_ID)]) } } diff --git a/crates/hir-ty/src/display.rs b/crates/hir-ty/src/display.rs index 3d426caf680e9..a324129b351cc 100644 --- a/crates/hir-ty/src/display.rs +++ b/crates/hir-ty/src/display.rs @@ -1738,7 +1738,7 @@ impl HirDisplay for TypeRef { ); let macro_call = macro_call.to_node(f.db.upcast()); match macro_call.path() { - Some(path) => match Path::from_src(path, &ctx) { + Some(path) => match Path::from_src(&ctx, path) { Some(path) => path.hir_fmt(f)?, None => write!(f, "{{macro}}")?, }, diff --git a/crates/hir/src/attrs.rs b/crates/hir/src/attrs.rs index 42d1515e59312..0ac1db9311b1d 100644 --- a/crates/hir/src/attrs.rs +++ b/crates/hir/src/attrs.rs @@ -241,7 +241,7 @@ fn modpath_from_str(db: &dyn HirDatabase, link: &str) -> Option { ModPath::from_src( db.upcast(), ast_path, - SpanMapRef::RealSpanMap(&RealSpanMap::empty(FileId(0))), + SpanMapRef::RealSpanMap(&RealSpanMap::absolute(FileId(0))), ) }; diff --git a/crates/hir/src/db.rs b/crates/hir/src/db.rs index cc3e869aa7ce5..b9112dee31661 100644 --- a/crates/hir/src/db.rs +++ b/crates/hir/src/db.rs @@ -3,10 +3,27 @@ //! we didn't do that. //! //! But we need this for at least LRU caching at the query level. 
-pub use hir_def::db::*; +pub use hir_def::db::{ + AttrsQuery, BlockDefMapQuery, BlockItemTreeQueryQuery, BodyQuery, BodyWithSourceMapQuery, + ConstDataQuery, ConstVisibilityQuery, CrateDefMapQueryQuery, CrateLangItemsQuery, + CrateSupportsNoStdQuery, DefDatabase, DefDatabaseStorage, EnumDataQuery, + EnumDataWithDiagnosticsQuery, ExprScopesQuery, ExternCrateDeclDataQuery, + FieldVisibilitiesQuery, FieldsAttrsQuery, FieldsAttrsSourceMapQuery, FileItemTreeQuery, + FunctionDataQuery, FunctionVisibilityQuery, GenericParamsQuery, ImplDataQuery, + ImplDataWithDiagnosticsQuery, ImportMapQuery, InternAnonymousConstQuery, InternBlockQuery, + InternConstQuery, InternDatabase, InternDatabaseStorage, InternEnumQuery, + InternExternBlockQuery, InternExternCrateQuery, InternFunctionQuery, InternImplQuery, + InternInTypeConstQuery, InternMacro2Query, InternMacroRulesQuery, InternProcMacroQuery, + InternStaticQuery, InternStructQuery, InternTraitAliasQuery, InternTraitQuery, + InternTypeAliasQuery, InternUnionQuery, InternUseQuery, LangAttrQuery, LangItemQuery, + Macro2DataQuery, MacroRulesDataQuery, ProcMacroDataQuery, StaticDataQuery, StructDataQuery, + StructDataWithDiagnosticsQuery, TraitAliasDataQuery, TraitDataQuery, + TraitDataWithDiagnosticsQuery, TypeAliasDataQuery, UnionDataQuery, + UnionDataWithDiagnosticsQuery, VariantsAttrsQuery, VariantsAttrsSourceMapQuery, +}; pub use hir_expand::db::{ AstIdMapQuery, DeclMacroExpanderQuery, ExpandDatabase, ExpandDatabaseStorage, - ExpandProcMacroQuery, InternMacroCallQuery, MacroArgQuery, MacroExpandQuery, - ParseMacroExpansionErrorQuery, ParseMacroExpansionQuery, + ExpandProcMacroQuery, IncludeExpandQuery, InternMacroCallQuery, MacroArgQuery, + MacroExpandQuery, ParseMacroExpansionErrorQuery, ParseMacroExpansionQuery, RealSpanMapQuery, }; pub use hir_ty::db::*; diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs index aacc3b08cb79b..7687fcbccaec0 100644 --- a/crates/hir/src/lib.rs +++ b/crates/hir/src/lib.rs @@ -137,7 +137,13 @@ pub use { // These are negative re-exports: pub using these names is forbidden, they // should remain private to hir internals. #[allow(unused)] -use {hir_def::path::Path, hir_expand::name::AsName}; +use { + hir_def::path::Path, + hir_expand::{ + name::AsName, + span::{ExpansionSpanMap, RealSpanMap, SpanMap, SpanMapRef}, + }, +}; /// hir::Crate describes a single crate. It's the main interface with which /// a crate's dependencies interact. Mostly, it should be just a proxy for the @@ -3483,11 +3489,6 @@ impl Impl { self.id.lookup(db.upcast()).container.into() } - pub fn as_builtin_derive_attr(self, db: &dyn HirDatabase) -> Option> { - let src = self.source(db)?; - src.file_id.as_builtin_derive_attr_node(db.upcast()) - } - pub fn as_builtin_derive_path(self, db: &dyn HirDatabase) -> Option> { let src = self.source(db)?; diff --git a/crates/hir/src/semantics.rs b/crates/hir/src/semantics.rs index 666ffd390ae73..f6ee836c5294a 100644 --- a/crates/hir/src/semantics.rs +++ b/crates/hir/src/semantics.rs @@ -868,7 +868,7 @@ impl<'db> SemanticsImpl<'db> { let analyze = self.analyze(path.syntax())?; let span_map = self.db.span_map(analyze.file_id); let ctx = LowerCtx::with_span_map(self.db.upcast(), span_map); - let hir_path = Path::from_src(path.clone(), &ctx)?; + let hir_path = Path::from_src(&ctx, path.clone())?; match analyze.resolver.resolve_path_in_type_ns_fully(self.db.upcast(), &hir_path)? 
{ TypeNs::TraitId(id) => Some(Trait { id }), _ => None, @@ -1466,7 +1466,7 @@ impl SemanticsScope<'_> { /// necessary a heuristic, as it doesn't take hygiene into account. pub fn speculative_resolve(&self, path: &ast::Path) -> Option { let ctx = LowerCtx::with_file_id(self.db.upcast(), self.file_id); - let path = Path::from_src(path.clone(), &ctx)?; + let path = Path::from_src(&ctx, path.clone())?; resolve_hir_path(self.db, &self.resolver, &path) } diff --git a/crates/hir/src/source_analyzer.rs b/crates/hir/src/source_analyzer.rs index 2339187122eea..0f1093e6d1430 100644 --- a/crates/hir/src/source_analyzer.rs +++ b/crates/hir/src/source_analyzer.rs @@ -483,7 +483,7 @@ impl SourceAnalyzer { macro_call: InFile<&ast::MacroCall>, ) -> Option { let ctx = LowerCtx::with_file_id(db.upcast(), macro_call.file_id); - let path = macro_call.value.path().and_then(|ast| Path::from_src(ast, &ctx))?; + let path = macro_call.value.path().and_then(|ast| Path::from_src(&ctx, ast))?; self.resolver .resolve_path_as_macro(db.upcast(), path.mod_path()?, Some(MacroSubNs::Bang)) .map(|(it, _)| it.into()) @@ -596,7 +596,7 @@ impl SourceAnalyzer { // This must be a normal source file rather than macro file. let ctx = LowerCtx::with_span_map(db.upcast(), db.span_map(self.file_id)); - let hir_path = Path::from_src(path.clone(), &ctx)?; + let hir_path = Path::from_src(&ctx, path.clone())?; // Case where path is a qualifier of a use tree, e.g. foo::bar::{Baz, Qux} where we are // trying to resolve foo::bar. diff --git a/crates/ide-completion/src/completions.rs b/crates/ide-completion/src/completions.rs index f49abcbae9bc4..7d38c638a8ed5 100644 --- a/crates/ide-completion/src/completions.rs +++ b/crates/ide-completion/src/completions.rs @@ -683,7 +683,7 @@ pub(super) fn complete_name_ref( ctx: &CompletionContext<'_>, NameRefContext { nameref, kind }: &NameRefContext, ) { - match dbg!(kind) { + match kind { NameRefKind::Path(path_ctx) => { flyimport::import_on_the_fly_path(acc, ctx, path_ctx); diff --git a/crates/ide-db/src/apply_change.rs b/crates/ide-db/src/apply_change.rs index dc77424c13144..2efe3ee1ed757 100644 --- a/crates/ide-db/src/apply_change.rs +++ b/crates/ide-db/src/apply_change.rs @@ -97,12 +97,14 @@ impl RootDatabase { // ExpandDatabase hir::db::AstIdMapQuery - hir::db::ParseMacroExpansionQuery + hir::db::DeclMacroExpanderQuery + hir::db::ExpandProcMacroQuery + hir::db::IncludeExpandQuery hir::db::InternMacroCallQuery hir::db::MacroArgQuery - hir::db::DeclMacroExpanderQuery hir::db::MacroExpandQuery - hir::db::ExpandProcMacroQuery + hir::db::ParseMacroExpansionQuery + hir::db::RealSpanMapQuery // DefDatabase hir::db::FileItemTreeQuery @@ -142,6 +144,13 @@ impl RootDatabase { hir::db::FunctionVisibilityQuery hir::db::ConstVisibilityQuery hir::db::CrateSupportsNoStdQuery + hir::db::BlockItemTreeQueryQuery + hir::db::ExternCrateDeclDataQuery + hir::db::LangAttrQuery + hir::db::InternAnonymousConstQuery + hir::db::InternExternCrateQuery + hir::db::InternInTypeConstQuery + hir::db::InternUseQuery // HirDatabase hir::db::InferQueryQuery diff --git a/crates/ide/src/hover.rs b/crates/ide/src/hover.rs index fc0c9ef17fa13..e0b64fe7988e5 100644 --- a/crates/ide/src/hover.rs +++ b/crates/ide/src/hover.rs @@ -166,7 +166,6 @@ fn hover_simple( } else { sema.descend_into_macros_with_same_text(original_token.clone(), offset) }; - dbg!(&descended); let descended = || descended.iter(); let result = descended() diff --git a/crates/mbe/src/syntax_bridge.rs b/crates/mbe/src/syntax_bridge.rs index 688ccb23252ca..bbf49670ce593 
100644 --- a/crates/mbe/src/syntax_bridge.rs +++ b/crates/mbe/src/syntax_bridge.rs @@ -565,7 +565,6 @@ impl Converter { #[derive(Debug)] enum SynToken { Ordinary(SyntaxToken), - // FIXME is this supposed to be `Punct`? Punct(SyntaxToken, usize), } diff --git a/crates/mbe/src/token_map.rs b/crates/mbe/src/token_map.rs index 978ee268c5ccc..32f61af25e5d6 100644 --- a/crates/mbe/src/token_map.rs +++ b/crates/mbe/src/token_map.rs @@ -6,12 +6,6 @@ use stdx::itertools::Itertools; use syntax::TextRange; use tt::Span; -// pub type HirFile = u32; -// pub type FileRange = (HirFile, TextRange); -// Option, LocalSyntaxContet -// pub type SyntaxContext = (); -// pub type LocalSyntaxContext = u32; - /// Maps absolute text ranges for the corresponding file to the relevant span data. #[derive(Debug, PartialEq, Eq, Clone, Hash)] // FIXME: Rename to SpanMap diff --git a/crates/proc-macro-api/src/msg.rs b/crates/proc-macro-api/src/msg.rs index 6a7329e322f31..4e6984f61b727 100644 --- a/crates/proc-macro-api/src/msg.rs +++ b/crates/proc-macro-api/src/msg.rs @@ -120,7 +120,7 @@ fn write_json(out: &mut impl Write, msg: &str) -> io::Result<()> { Ok(()) } -/* +/*TODO #[cfg(test)] mod tests { From 98cfdde8ba5c784fb5d7114070abb80e01a6d2bb Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Tue, 28 Nov 2023 10:55:21 +0100 Subject: [PATCH 14/80] Thinner TokenMap --- Cargo.lock | 2 +- crates/base-db/src/span.rs | 6 +- .../hir-def/src/macro_expansion_tests/mbe.rs | 2 +- crates/hir-expand/src/attrs.rs | 2 +- crates/hir-expand/src/db.rs | 18 +-- crates/hir-expand/src/lib.rs | 91 +++++------- crates/hir-expand/src/span.rs | 6 +- crates/hir/src/semantics.rs | 65 ++++---- crates/hir/src/semantics/source_to_def.rs | 4 +- crates/hir/src/source_analyzer.rs | 7 +- crates/mbe/src/syntax_bridge.rs | 36 ++--- crates/mbe/src/token_map.rs | 140 +++--------------- .../src/integrated_benchmarks.rs | 3 +- 13 files changed, 123 insertions(+), 259 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 3618d69c749a7..775231f3ea2e9 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1752,7 +1752,7 @@ dependencies = [ "always-assert", "backtrace", "crossbeam-channel", - "itertools 0.12.0", + "itertools", "jod-thread", "libc", "miow", diff --git a/crates/base-db/src/span.rs b/crates/base-db/src/span.rs index 370c732813ac4..607b8027ca952 100644 --- a/crates/base-db/src/span.rs +++ b/crates/base-db/src/span.rs @@ -33,10 +33,11 @@ impl SyntaxContext for SyntaxContextId { impl SyntaxContextId { // TODO: This is very much UB, salsa exposes no way to create an InternId in a const context // currently (which kind of makes sense but we need it here!) - pub const ROOT: Self = SyntaxContextId(unsafe { core::mem::transmute(1) }); + pub const ROOT: Self = SyntaxContextId(unsafe { InternId::new_unchecked(0) }); // TODO: This is very much UB, salsa exposes no way to create an InternId in a const context // currently (which kind of makes sense but we need it here!) - pub const SELF_REF: Self = SyntaxContextId(unsafe { core::mem::transmute(!0u32) }); + pub const SELF_REF: Self = + SyntaxContextId(unsafe { InternId::new_unchecked(InternId::MAX - 1) }); pub fn is_root(self) -> bool { self == Self::ROOT @@ -107,6 +108,7 @@ impl fmt::Debug for HirFileId { pub struct MacroFileId { pub macro_call_id: MacroCallId, } + /// `MacroCallId` identifies a particular macro invocation, like /// `println!("Hello, {}", world)`. 
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] diff --git a/crates/hir-def/src/macro_expansion_tests/mbe.rs b/crates/hir-def/src/macro_expansion_tests/mbe.rs index fc17dcde9a072..39079685a4d92 100644 --- a/crates/hir-def/src/macro_expansion_tests/mbe.rs +++ b/crates/hir-def/src/macro_expansion_tests/mbe.rs @@ -36,7 +36,7 @@ macro_rules! f { } struct#FileId(0):1@58..64\2# MyTraitMap2#FileId(0):2@31..42\0# {#FileId(0):1@72..73\2# - map#FileId(0):1@86..89\2#:#FileId(0):1@89..90\2# #FileId(0):1@89..90\2#::#FileId(0):1@92..93\2#std#FileId(0):1@93..96\2#::#FileId(0):1@97..98\2#collections#FileId(0):1@98..109\2#::#FileId(0):1@110..111\2#HashSet#FileId(0):1@111..118\2#<#FileId(0):1@118..119\2#(#FileId(0):1@119..120\2#)#FileId(0):1@120..121\2#>#FileId(0):1@121..122\2#,#FileId(0):1@122..123\2# + map#FileId(0):1@86..89\2#:#FileId(0):1@89..90\2# #FileId(0):1@89..90\2#::#FileId(0):1@91..92\2#std#FileId(0):1@93..96\2#::#FileId(0):1@96..97\2#collections#FileId(0):1@98..109\2#::#FileId(0):1@109..110\2#HashSet#FileId(0):1@111..118\2#<#FileId(0):1@118..119\2#(#FileId(0):1@119..120\2#)#FileId(0):1@120..121\2#>#FileId(0):1@121..122\2#,#FileId(0):1@122..123\2# }#FileId(0):1@132..133\2# "#]], ); diff --git a/crates/hir-expand/src/attrs.rs b/crates/hir-expand/src/attrs.rs index f1619db2420a9..23a8fffa8c299 100644 --- a/crates/hir-expand/src/attrs.rs +++ b/crates/hir-expand/src/attrs.rs @@ -308,7 +308,7 @@ impl Attr { return None; } let path = meta.path()?; - let call_site = span_map.span_for_range(path.syntax().text_range()).ctx; + let call_site = span_map.span_at(path.syntax().text_range().start()).ctx; Some(( ModPath::from_src(db, path, SpanMapRef::ExpansionSpanMap(&span_map))?, call_site, diff --git a/crates/hir-expand/src/db.rs b/crates/hir-expand/src/db.rs index fed1705fb7d3e..601a754abbd43 100644 --- a/crates/hir-expand/src/db.rs +++ b/crates/hir-expand/src/db.rs @@ -254,7 +254,7 @@ pub fn expand_speculative( }; let expand_to = macro_expand_to(db, actual_macro_call); - let (node, rev_tmap) = token_tree_to_syntax_node(db, &speculative_expansion.value, expand_to); + let (node, rev_tmap) = token_tree_to_syntax_node(&speculative_expansion.value, expand_to); let syntax_node = node.syntax_node(); let token = rev_tmap @@ -312,7 +312,7 @@ fn parse_macro_expansion( tracing::debug!("expanded = {}", tt.as_debug_string()); tracing::debug!("kind = {:?}", expand_to); - let (parse, rev_token_map) = token_tree_to_syntax_node(db, &tt, expand_to); + let (parse, rev_token_map) = token_tree_to_syntax_node(&tt, expand_to); ExpandResult { value: (parse, Arc::new(rev_token_map)), err } } @@ -674,7 +674,6 @@ fn macro_expand_to(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandTo { } fn token_tree_to_syntax_node( - db: &dyn ExpandDatabase, tt: &tt::Subtree, expand_to: ExpandTo, ) -> (Parse, ExpansionSpanMap) { @@ -685,18 +684,7 @@ fn token_tree_to_syntax_node( ExpandTo::Type => mbe::TopEntryPoint::Type, ExpandTo::Expr => mbe::TopEntryPoint::Expr, }; - let (parse, mut span_map) = mbe::token_tree_to_syntax_node(tt, entry_point); - // FIXME: now what the hell is going on here - span_map.span_map.sort_by(|(_, a), (_, b)| { - a.anchor.file_id.cmp(&b.anchor.file_id).then_with(|| { - let map = db.ast_id_map(a.anchor.file_id.into()); - map.get_erased(a.anchor.ast_id) - .text_range() - .start() - .cmp(&map.get_erased(b.anchor.ast_id).text_range().start()) - }) - }); - (parse, span_map) + mbe::token_tree_to_syntax_node(tt, entry_point) } fn check_tt_count(tt: &tt::Subtree) -> Result<(), ExpandResult>> { diff --git 
a/crates/hir-expand/src/lib.rs b/crates/hir-expand/src/lib.rs index 4e5aa90312610..9027ea1c27b45 100644 --- a/crates/hir-expand/src/lib.rs +++ b/crates/hir-expand/src/lib.rs @@ -44,7 +44,7 @@ use crate::{ db::TokenExpander, mod_path::ModPath, proc_macro::ProcMacroExpander, - span::ExpansionSpanMap, + span::{ExpansionSpanMap, SpanMap}, }; pub use crate::ast_id_map::{AstId, ErasedAstId, ErasedFileAstId}; @@ -172,7 +172,6 @@ pub trait HirFileIdExt { /// For macro-expansion files, returns the file original source file the /// expansion originated from. fn original_file(self, db: &dyn db::ExpandDatabase) -> FileId; - fn expansion_level(self, db: &dyn db::ExpandDatabase) -> u32; /// If this is a macro call, returns the syntax node of the call. fn call_node(self, db: &dyn db::ExpandDatabase) -> Option>; @@ -218,18 +217,6 @@ impl HirFileIdExt for HirFileId { } } - fn expansion_level(self, db: &dyn db::ExpandDatabase) -> u32 { - let mut level = 0; - let mut curr = self; - while let Some(macro_file) = curr.macro_file() { - let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id); - - level += 1; - curr = loc.kind.file_id(); - } - level - } - fn call_node(self, db: &dyn db::ExpandDatabase) -> Option> { let macro_file = self.macro_file()?; let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id); @@ -330,6 +317,32 @@ impl HirFileIdExt for HirFileId { } } +pub trait MacroFileIdExt { + fn expansion_level(self, db: &dyn db::ExpandDatabase) -> u32; + fn expansion_info(self, db: &dyn db::ExpandDatabase) -> ExpansionInfo; +} + +impl MacroFileIdExt for MacroFileId { + fn expansion_level(self, db: &dyn db::ExpandDatabase) -> u32 { + let mut level = 0; + let mut macro_file = self; + loop { + let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id); + + level += 1; + macro_file = match loc.kind.file_id().repr() { + HirFileIdRepr::FileId(_) => break level, + HirFileIdRepr::MacroFile(it) => it, + }; + } + } + + /// Return expansion information if it is a macro-expansion file + fn expansion_info(self, db: &dyn db::ExpandDatabase) -> ExpansionInfo { + ExpansionInfo::new(db, self) + } +} + impl MacroDefId { pub fn as_lazy_macro( self, @@ -398,7 +411,7 @@ impl MacroCallLoc { match file_id.repr() { HirFileIdRepr::FileId(file_id) => db.real_span_map(file_id).span_for_range(range), HirFileIdRepr::MacroFile(m) => { - db.parse_macro_expansion(m).value.1.span_for_range(range) + db.parse_macro_expansion(m).value.1.span_at(range.start()) } } } @@ -565,9 +578,8 @@ pub struct ExpansionInfo { macro_def: TokenExpander, macro_arg: Arc, - exp_map: Arc, - /// [`None`] if the call is in a real file - arg_map: Option>, + pub exp_map: Arc, + arg_map: SpanMap, } impl ExpansionInfo { @@ -582,38 +594,14 @@ impl ExpansionInfo { /// Maps the passed in file range down into a macro expansion if it is the input to a macro call. 
pub fn map_range_down<'a>( &'a self, - db: &'a dyn db::ExpandDatabase, - FileRange { file_id, range: absolute_range }: FileRange, + span: SpanData, // FIXME: use this for range mapping, so that we can resolve inline format args _relative_token_offset: Option, // FIXME: ret ty should be wrapped in InMacroFile ) -> Option> + 'a> { - // search for all entries in the span map that have the given span and return the - // corresponding text ranges inside the expansion - // FIXME: Make this proper - let span_map = &self.exp_map.span_map; - let (start, end) = if span_map - .first() - .map_or(false, |(_, span)| span.anchor.file_id == file_id) - { - (0, span_map.partition_point(|a| a.1.anchor.file_id == file_id)) - } else { - let start = span_map.partition_point(|a| a.1.anchor.file_id != file_id); - (start, start + span_map[start..].partition_point(|a| a.1.anchor.file_id == file_id)) - }; - let tokens = span_map[start..end] - .iter() - .filter_map(move |(range, span)| { - // we need to resolve the relative ranges here to make sure that we are in fact - // considering differently anchored spans (this might occur with proc-macros) - let offset = db - .ast_id_map(span.anchor.file_id.into()) - .get_erased(span.anchor.ast_id) - .text_range() - .start(); - let abs_range = span.range + offset; - absolute_range.eq(&abs_range).then_some(*range) - }) + let tokens = self + .exp_map + .ranges_with_span(span) .flat_map(move |range| self.expanded.value.covering_element(range).into_token()); Some(tokens.map(move |token| InFile::new(self.expanded.file_id.into(), token))) @@ -626,7 +614,7 @@ impl ExpansionInfo { range: TextRange, ) -> (FileRange, SyntaxContextId) { debug_assert!(self.expanded.value.text_range().contains_range(range)); - let span = self.exp_map.span_for_range(range); + let span = self.exp_map.span_at(range.start()); let anchor_offset = db .ast_id_map(span.anchor.file_id.into()) .get_erased(span.anchor.ast_id) @@ -672,15 +660,15 @@ impl ExpansionInfo { token: TextRange, ) -> InFile> { debug_assert!(self.expanded.value.text_range().contains_range(token)); - let span = self.exp_map.span_for_range(token); + let span = self.exp_map.span_at(token.start()); match &self.arg_map { - None => { + SpanMap::RealSpanMap(_) => { let file_id = span.anchor.file_id.into(); let anchor_offset = db.ast_id_map(file_id).get_erased(span.anchor.ast_id).text_range().start(); InFile { file_id, value: smallvec::smallvec![span.range + anchor_offset] } } - Some(arg_map) => { + SpanMap::ExpansionSpanMap(arg_map) => { let arg_range = self .arg .value @@ -701,8 +689,7 @@ impl ExpansionInfo { let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id); let arg_tt = loc.kind.arg(db); - let arg_map = - arg_tt.file_id.macro_file().map(|file| db.parse_macro_expansion(file).value.1); + let arg_map = db.span_map(arg_tt.file_id); let macro_def = db.macro_expander(loc.def); let (parse, exp_map) = db.parse_macro_expansion(macro_file).value; diff --git a/crates/hir-expand/src/span.rs b/crates/hir-expand/src/span.rs index 589f415de566e..c2399259facb5 100644 --- a/crates/hir-expand/src/span.rs +++ b/crates/hir-expand/src/span.rs @@ -11,7 +11,7 @@ use crate::db::ExpandDatabase; pub type ExpansionSpanMap = TokenMap; /// Spanmap for a macro file or a real file -#[derive(Clone, Debug)] +#[derive(Clone, Debug, PartialEq, Eq)] pub enum SpanMap { /// Spanmap for a macro file ExpansionSpanMap(Arc), @@ -46,7 +46,7 @@ impl mbe::SpanMapper for RealSpanMap { impl SpanMap { pub fn span_for_range(&self, range: TextRange) -> SpanData { match 
self { - Self::ExpansionSpanMap(span_map) => span_map.span_for_range(range), + Self::ExpansionSpanMap(span_map) => span_map.span_at(range.start()), Self::RealSpanMap(span_map) => span_map.span_for_range(range), } } @@ -62,7 +62,7 @@ impl SpanMap { impl SpanMapRef<'_> { pub fn span_for_range(self, range: TextRange) -> SpanData { match self { - Self::ExpansionSpanMap(span_map) => span_map.span_for_range(range), + Self::ExpansionSpanMap(span_map) => span_map.span_at(range.start()), Self::RealSpanMap(span_map) => span_map.span_for_range(range), } } diff --git a/crates/hir/src/semantics.rs b/crates/hir/src/semantics.rs index f6ee836c5294a..38c4f081b7604 100644 --- a/crates/hir/src/semantics.rs +++ b/crates/hir/src/semantics.rs @@ -17,6 +17,7 @@ use hir_def::{ }; use hir_expand::{ db::ExpandDatabase, files::InRealFile, name::AsName, ExpansionInfo, HirFileIdExt, MacroCallId, + MacroFileId, MacroFileIdExt, }; use itertools::Itertools; use rustc_hash::{FxHashMap, FxHashSet}; @@ -117,11 +118,11 @@ pub struct Semantics<'db, DB> { pub struct SemanticsImpl<'db> { pub db: &'db dyn HirDatabase, s2d_cache: RefCell, - expansion_info_cache: RefCell>>, - // Rootnode to HirFileId cache + expansion_info_cache: RefCell>, + /// Rootnode to HirFileId cache cache: RefCell>, - // MacroCall to its expansion's HirFileId cache - macro_call_cache: RefCell, HirFileId>>, + /// MacroCall to its expansion's MacroFileId cache + macro_call_cache: RefCell, MacroFileId>>, } impl fmt::Debug for Semantics<'_, DB> { @@ -258,7 +259,7 @@ impl<'db> SemanticsImpl<'db> { pub fn expand(&self, macro_call: &ast::MacroCall) -> Option { let sa = self.analyze_no_infer(macro_call.syntax())?; let file_id = sa.expand(self.db, InFile::new(sa.file_id, macro_call))?; - let node = self.parse_or_expand(file_id); + let node = self.parse_or_expand(file_id.into()); Some(node) } @@ -527,6 +528,7 @@ impl<'db> SemanticsImpl<'db> { res } + // FIXME: should only take real file inputs for simplicity fn descend_into_macros_impl( &self, token: SyntaxToken, @@ -537,31 +539,22 @@ impl<'db> SemanticsImpl<'db> { ) { // FIXME: Clean this up let _p = profile::span("descend_into_macros"); - let parent = match token.parent() { + let sa = match token.parent().and_then(|parent| self.analyze_no_infer(&parent)) { Some(it) => it, None => return, }; - let sa = match self.analyze_no_infer(&parent) { - Some(it) => it, - None => return, - }; - let def_map = sa.resolver.def_map(); - let absolute_range = match sa.file_id.repr() { + + let mut cache = self.expansion_info_cache.borrow_mut(); + let mut mcache = self.macro_call_cache.borrow_mut(); + let span = match sa.file_id.repr() { base_db::span::HirFileIdRepr::FileId(file_id) => { - FileRange { file_id, range: token.text_range() } - } - base_db::span::HirFileIdRepr::MacroFile(m) => { - let span = - self.db.parse_macro_expansion(m).value.1.span_for_range(token.text_range()); - let range = span.range - + self - .db - .ast_id_map(span.anchor.file_id.into()) - .get_erased(span.anchor.ast_id) - .text_range() - .start(); - FileRange { file_id: span.anchor.file_id, range } + self.db.real_span_map(file_id).span_for_range(token.text_range()) } + base_db::span::HirFileIdRepr::MacroFile(macro_file) => cache + .entry(macro_file) + .or_insert_with(|| macro_file.expansion_info(self.db.upcast())) + .exp_map + .span_at(token.text_range().start()), }; // fetch span information of token in real file, then use that look through expansions of @@ -569,24 +562,21 @@ impl<'db> SemanticsImpl<'db> { // what about things where spans change? 
Due to being joined etc, that is we don't find the // exact span anymore? + let def_map = sa.resolver.def_map(); let mut stack: SmallVec<[_; 4]> = smallvec![InFile::new(sa.file_id, token)]; - let mut cache = self.expansion_info_cache.borrow_mut(); - let mut mcache = self.macro_call_cache.borrow_mut(); let mut process_expansion_for_token = |stack: &mut SmallVec<_>, macro_file, _token: InFile<&_>| { let expansion_info = cache .entry(macro_file) - .or_insert_with(|| macro_file.expansion_info(self.db.upcast())) - .as_ref()?; + .or_insert_with(|| macro_file.expansion_info(self.db.upcast())); { let InFile { file_id, value } = expansion_info.expanded(); self.cache(value, file_id); } - let mapped_tokens = - expansion_info.map_range_down(self.db.upcast(), absolute_range, None)?; + let mapped_tokens = expansion_info.map_range_down(span, None)?; let len = stack.len(); // requeue the tokens we got from mapping our current token down @@ -599,9 +589,9 @@ impl<'db> SemanticsImpl<'db> { // either due to not being in a macro-call or because its unused push it into the result vec, // otherwise push the remapped tokens back into the queue as they can potentially be remapped again. while let Some(token) = stack.pop() { - self.db.unwind_if_cancelled(); let was_not_remapped = (|| { // First expand into attribute invocations + let containing_attribute_macro_call = self.with_ctx(|ctx| { token.value.parent_ancestors().filter_map(ast::Item::cast).find_map(|item| { if item.attrs().next().is_none() { @@ -612,7 +602,7 @@ impl<'db> SemanticsImpl<'db> { }) }); if let Some(call_id) = containing_attribute_macro_call { - let file_id = call_id.as_file(); + let file_id = call_id.as_macro_file(); return process_expansion_for_token(&mut stack, file_id, token.as_ref()); } @@ -629,7 +619,8 @@ impl<'db> SemanticsImpl<'db> { } if let Some(macro_call) = ast::MacroCall::cast(parent.clone()) { - let mcall = token.with_value(macro_call); + let mcall: hir_expand::files::InFileWrapper = + token.with_value(macro_call); let file_id = match mcache.get(&mcall) { Some(&it) => it, None => { @@ -659,7 +650,7 @@ impl<'db> SemanticsImpl<'db> { match derive_call { Some(call_id) => { // resolved to a derive - let file_id = call_id.as_file(); + let file_id = call_id.as_macro_file(); return process_expansion_for_token( &mut stack, file_id, @@ -698,7 +689,7 @@ impl<'db> SemanticsImpl<'db> { for (.., derive) in helpers.iter().filter(|(helper, ..)| *helper == attr_name) { res = res.or(process_expansion_for_token( &mut stack, - derive.as_file(), + derive.as_macro_file(), token.as_ref(), )); } @@ -1052,7 +1043,7 @@ impl<'db> SemanticsImpl<'db> { fn with_ctx) -> T, T>(&self, f: F) -> T { let mut cache = self.s2d_cache.borrow_mut(); - let mut ctx = SourceToDefCtx { db: self.db, cache: &mut cache }; + let mut ctx = SourceToDefCtx { db: self.db, dynmap_cache: &mut cache }; f(&mut ctx) } diff --git a/crates/hir/src/semantics/source_to_def.rs b/crates/hir/src/semantics/source_to_def.rs index 5b20c873157d2..df8c1e904fe89 100644 --- a/crates/hir/src/semantics/source_to_def.rs +++ b/crates/hir/src/semantics/source_to_def.rs @@ -112,7 +112,7 @@ pub(super) type SourceToDefCache = FxHashMap<(ChildContainer, HirFileId), DynMap pub(super) struct SourceToDefCtx<'a, 'b> { pub(super) db: &'b dyn HirDatabase, - pub(super) cache: &'a mut SourceToDefCache, + pub(super) dynmap_cache: &'a mut SourceToDefCache, } impl SourceToDefCtx<'_, '_> { @@ -300,7 +300,7 @@ impl SourceToDefCtx<'_, '_> { fn cache_for(&mut self, container: ChildContainer, file_id: HirFileId) -> &DynMap { 
let db = self.db; - self.cache + self.dynmap_cache .entry((container, file_id)) .or_insert_with(|| container.child_by_source(db, file_id)) } diff --git a/crates/hir/src/source_analyzer.rs b/crates/hir/src/source_analyzer.rs index 0f1093e6d1430..8b1148368961d 100644 --- a/crates/hir/src/source_analyzer.rs +++ b/crates/hir/src/source_analyzer.rs @@ -29,7 +29,7 @@ use hir_expand::{ mod_path::path, name, name::{AsName, Name}, - HirFileId, HirFileIdExt, InFile, + HirFileId, HirFileIdExt, InFile, MacroFileId, MacroFileIdExt, }; use hir_ty::{ diagnostics::{ @@ -753,14 +753,15 @@ impl SourceAnalyzer { &self, db: &dyn HirDatabase, macro_call: InFile<&ast::MacroCall>, - ) -> Option { + ) -> Option { let krate = self.resolver.krate(); let macro_call_id = macro_call.as_call_id(db.upcast(), krate, |path| { self.resolver .resolve_path_as_macro(db.upcast(), &path, Some(MacroSubNs::Bang)) .map(|(it, _)| macro_id_to_def_id(db.upcast(), it)) })?; - Some(macro_call_id.as_file()).filter(|it| it.expansion_level(db.upcast()) < 64) + // why the 64? + Some(macro_call_id.as_macro_file()).filter(|it| it.expansion_level(db.upcast()) < 64) } pub(crate) fn resolve_variant( diff --git a/crates/mbe/src/syntax_bridge.rs b/crates/mbe/src/syntax_bridge.rs index bbf49670ce593..5d802ba86c0c2 100644 --- a/crates/mbe/src/syntax_bridge.rs +++ b/crates/mbe/src/syntax_bridge.rs @@ -23,7 +23,7 @@ pub trait SpanMapper { impl SpanMapper for TokenMap { fn span_for(&self, range: TextRange) -> S { - self.span_for_range(range) + self.span_at(range.start()) } } @@ -152,8 +152,8 @@ where { let mut map = TokenMap::empty(); node.descendants_with_tokens().filter_map(NodeOrToken::into_token).for_each(|t| { - map.insert( - t.text_range(), + map.push( + t.text_range().start(), SpanData { range: t.text_range() - anchor_offset, anchor, ctx: Ctx::DUMMY }, ); }); @@ -730,15 +730,13 @@ where self.inner.start_node(SyntaxKind::NAME_REF); self.inner.token(SyntaxKind::INT_NUMBER, left); self.inner.finish_node(); - let range = TextRange::at(self.text_pos, TextSize::of(left)); - self.token_map.insert(range, span); + self.token_map.push(self.text_pos + TextSize::of(left), span); // here we move the exit up, the original exit has been deleted in process self.inner.finish_node(); self.inner.token(SyntaxKind::DOT, "."); - let range = TextRange::at(range.end(), TextSize::of(".")); - self.token_map.insert(range, span); + self.token_map.push(self.text_pos + TextSize::of(left) + TextSize::of("."), span); if has_pseudo_dot { assert!(right.is_empty(), "{left}.{right}"); @@ -746,8 +744,7 @@ where assert!(!right.is_empty(), "{left}.{right}"); self.inner.start_node(SyntaxKind::NAME_REF); self.inner.token(SyntaxKind::INT_NUMBER, right); - let range = TextRange::at(range.end(), TextSize::of(right)); - self.token_map.insert(range, span); + self.token_map.push(self.text_pos + TextSize::of(text), span); self.inner.finish_node(); // the parser creates an unbalanced start node, we are required to close it here @@ -772,7 +769,7 @@ where break; } last = self.cursor; - let text: &str = loop { + let (text, span) = loop { break match self.cursor.token_tree() { Some(tt::buffer::TokenTreeRef::Leaf(leaf, _)) => { // Mark the range if needed @@ -788,19 +785,13 @@ where } tt::Leaf::Literal(lit) => (lit.text.as_str(), lit.span), }; - let range = TextRange::at(self.text_pos, TextSize::of(text)); - self.token_map.insert(range, span); self.cursor = self.cursor.bump(); - text + (text, span) } Some(tt::buffer::TokenTreeRef::Subtree(subtree, _)) => { self.cursor = 
self.cursor.subtree().unwrap(); match delim_to_str(subtree.delimiter.kind, false) { - Some(it) => { - let range = TextRange::at(self.text_pos, TextSize::of(it)); - self.token_map.insert(range, subtree.delimiter.open); - it - } + Some(it) => (it, subtree.delimiter.open), None => continue, } } @@ -808,11 +799,7 @@ where let parent = self.cursor.end().unwrap(); self.cursor = self.cursor.bump(); match delim_to_str(parent.delimiter.kind, true) { - Some(it) => { - let range = TextRange::at(self.text_pos, TextSize::of(it)); - self.token_map.insert(range, parent.delimiter.close); - it - } + Some(it) => (it, parent.delimiter.close), None => continue, } } @@ -820,6 +807,7 @@ where }; self.buf += text; self.text_pos += TextSize::of(text); + self.token_map.push(self.text_pos, span); } self.inner.token(kind, self.buf.as_str()); @@ -839,8 +827,8 @@ where // need to add whitespace either. if curr.spacing == tt::Spacing::Alone && curr.char != ';' && next.char != '\'' { self.inner.token(WHITESPACE, " "); - self.token_map.insert(TextRange::at(self.text_pos, TextSize::of(' ')), curr.span); self.text_pos += TextSize::of(' '); + self.token_map.push(self.text_pos, curr.span); } } } diff --git a/crates/mbe/src/token_map.rs b/crates/mbe/src/token_map.rs index 32f61af25e5d6..c2ec30ca72fb0 100644 --- a/crates/mbe/src/token_map.rs +++ b/crates/mbe/src/token_map.rs @@ -3,7 +3,7 @@ use std::hash::Hash; use stdx::itertools::Itertools; -use syntax::TextRange; +use syntax::{TextRange, TextSize}; use tt::Span; /// Maps absolute text ranges for the corresponding file to the relevant span data. @@ -12,138 +12,46 @@ use tt::Span; pub struct TokenMap { // FIXME: This needs to be sorted by (FileId, AstId) // Then we can do a binary search on the file id, - // then a bin search on the ast id - pub span_map: Vec<(TextRange, S)>, - // span_map2: rustc_hash::FxHashMap, + // then a bin search on the ast id? + spans: Vec<(TextSize, S)>, } impl TokenMap { - pub fn empty() -> Self { - Self { span_map: Vec::new() } + pub(crate) fn empty() -> Self { + Self { spans: Vec::new() } } - pub fn finish(&mut self) { - debug_assert_eq!( - self.span_map - .iter() - .sorted_by_key(|it| (it.0.start(), it.0.end())) - .tuple_windows() - .find(|(range, next)| range.0.end() != next.0.start()), - None, - "span map has holes!" - ); - self.span_map.shrink_to_fit(); + pub(crate) fn finish(&mut self) { + assert!(self.spans.iter().tuple_windows().all(|(a, b)| a.0 < b.0)); + self.spans.shrink_to_fit(); } - pub(crate) fn insert(&mut self, range: TextRange, span: S) { - self.span_map.push((range, span)); + pub(crate) fn push(&mut self, offset: TextSize, span: S) { + self.spans.push((offset, span)); } pub fn ranges_with_span(&self, span: S) -> impl Iterator + '_ { // FIXME: linear search - // FIXME: Disregards resolving spans to get more matches! See ExpansionInfo::map_token_down - self.span_map.iter().filter_map( - move |(range, s)| { - if s == &span { - Some(*range) - } else { - None - } - }, - ) + self.spans.iter().enumerate().filter_map(move |(idx, &(end, s))| { + if s != span { + return None; + } + let start = idx.checked_sub(1).map_or(TextSize::new(0), |prev| self.spans[prev].0); + Some(TextRange::new(start, end)) + }) } // FIXME: We need APIs for fetching the span of a token as well as for a whole node. The node // one *is* fallible though. 
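// ---------------------------------------------------------------------------
// Editor's sketch (not part of the patch): a minimal, self-contained model of
// the offset-keyed lookup that the new `span_at` below implements. Entries
// record each token's *end* offset in ascending order, so a position resolves
// with one binary search: the covering token is the first entry whose end
// offset is greater than the queried offset. The types here are simplified
// stand-ins for `TextSize` and the span data, not rust-analyzer APIs.
fn span_at(spans: &[(u32, &'static str)], offset: u32) -> &'static str {
    let entry = spans.partition_point(|&(end, _)| end <= offset);
    spans[entry].1
}

fn main() {
    // "fn main": "fn" ends at offset 2, the space at 3, "main" at 7.
    let spans = [(2, "span_fn"), (3, "span_ws"), (7, "span_main")];
    assert_eq!(span_at(&spans, 0), "span_fn");
    assert_eq!(span_at(&spans, 4), "span_main"); // offset falls inside "main"
}
// ---------------------------------------------------------------------------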
- // Token span fetching technically only needs an offset really, as the entire file span is - // populated, where node fetching is more like fetching the spans at all source positions, and - // then we need to verify that all those positions have the same context, if not we fail! But - // how do we handle them having different span ranges? - - pub fn span_for_range(&self, range: TextRange) -> S { - // TODO FIXME: make this proper - self.span_map - .iter() - .filter_map(|(r, s)| Some((r, s, r.intersect(range).filter(|it| !it.is_empty())?))) - .max_by_key(|(_, _, intersection)| intersection.len()) - .map_or_else( - || panic!("no span for range {:?} in {:#?}", range, self.span_map), - |(_, &s, _)| s, - ) + pub fn span_at(&self, offset: TextSize) -> S { + let entry = self.spans.partition_point(|&(it, _)| it <= offset); + self.spans[entry].1 } pub fn spans_for_node_range(&self, range: TextRange) -> impl Iterator + '_ { - // TODO FIXME: make this proper - self.span_map - .iter() - .filter(move |(r, _)| r.intersect(range).filter(|it| !it.is_empty()).is_some()) - .map(|&(_, s)| s) + let (start, end) = (range.start(), range.end()); + let start_entry = self.spans.partition_point(|&(it, _)| it <= start); + let end_entry = self.spans[start_entry..].partition_point(|&(it, _)| it <= end); // FIXME: this might be wrong? + (&self.spans[start_entry..][..end_entry]).iter().map(|&(_, s)| s) } - - // pub fn ranges_by_token( - // &self, - // token_id: tt::TokenId, - // kind: SyntaxKind, - // ) -> impl Iterator + '_ { - // self.entries - // .iter() - // .filter(move |&&(tid, _)| tid == token_id) - // .filter_map(move |(_, range)| range.by_kind(kind)) - // } - - // pub(crate) fn remove_delim(&mut self, idx: usize) { - // // FIXME: This could be accidentally quadratic - // self.entries.remove(idx); - // } - - // pub fn entries(&self) -> impl Iterator + '_ { - // self.entries.iter().filter_map(|&(tid, tr)| match tr { - // TokenTextRange::Token(range) => Some((tid, range)), - // TokenTextRange::Delimiter(_) => None, - // }) - // } - - // pub fn filter(&mut self, id: impl Fn(tt::TokenId) -> bool) { - // self.entries.retain(|&(tid, _)| id(tid)); - // } - // pub fn synthetic_token_id(&self, token_id: tt::TokenId) -> Option { - // self.synthetic_entries.iter().find(|(tid, _)| *tid == token_id).map(|(_, id)| *id) - // } - - // pub fn first_range_by_token( - // &self, - // token_id: tt::TokenId, - // kind: SyntaxKind, - // ) -> Option { - // self.ranges_by_token(token_id, kind).next() - // } - - // pub(crate) fn insert(&mut self, token_id: tt::TokenId, relative_range: TextRange) { - // self.entries.push((token_id, TokenTextRange::Token(relative_range))); - // } - - // pub(crate) fn insert_synthetic(&mut self, token_id: tt::TokenId, id: SyntheticTokenId) { - // self.synthetic_entries.push((token_id, id)); - // } - - // pub(crate) fn insert_delim( - // &mut self, - // token_id: tt::TokenId, - // open_relative_range: TextRange, - // close_relative_range: TextRange, - // ) -> usize { - // let res = self.entries.len(); - // let cover = open_relative_range.cover(close_relative_range); - - // self.entries.push((token_id, TokenTextRange::Delimiter(cover))); - // res - // } - - // pub(crate) fn update_close_delim(&mut self, idx: usize, close_relative_range: TextRange) { - // let (_, token_text_range) = &mut self.entries[idx]; - // if let TokenTextRange::Delimiter(dim) = token_text_range { - // let cover = dim.cover(close_relative_range); - // *token_text_range = TokenTextRange::Delimiter(cover); - // } - // } } diff --git 
a/crates/rust-analyzer/src/integrated_benchmarks.rs b/crates/rust-analyzer/src/integrated_benchmarks.rs index b7e839ac2a5e2..a29e18811d881 100644 --- a/crates/rust-analyzer/src/integrated_benchmarks.rs +++ b/crates/rust-analyzer/src/integrated_benchmarks.rs @@ -56,8 +56,7 @@ fn integrated_highlighting_benchmark() { analysis.highlight_as_html(file_id, false).unwrap(); } - profile::init_from("*>100"); - // let _s = profile::heartbeat_span(); + profile::init_from("*>1"); { let _it = stdx::timeit("change"); From b98597f06d34efabe95828f9e6d9161fd608bead Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Tue, 28 Nov 2023 16:28:51 +0100 Subject: [PATCH 15/80] Re-enable proc-macros --- Cargo.lock | 3 + crates/base-db/src/fixture.rs | 12 + crates/base-db/src/input.rs | 3 + crates/base-db/src/span.rs | 2 + .../hir-def/src/macro_expansion_tests/mod.rs | 5 +- crates/hir-expand/src/db.rs | 55 ++- crates/hir-expand/src/fixup.rs | 273 ++++++------- crates/hir-expand/src/proc_macro.rs | 8 +- crates/load-cargo/src/lib.rs | 24 +- crates/mbe/src/expander/transcriber.rs | 2 +- crates/mbe/src/lib.rs | 3 +- crates/mbe/src/syntax_bridge.rs | 105 +++-- crates/proc-macro-api/Cargo.toml | 7 +- crates/proc-macro-api/src/lib.rs | 39 +- crates/proc-macro-api/src/msg.rs | 127 ++++-- crates/proc-macro-api/src/msg/flat.rs | 381 +++++++++--------- crates/proc-macro-srv-cli/src/main.rs | 6 +- crates/proc-macro-srv/src/dylib.rs | 10 +- crates/proc-macro-srv/src/lib.rs | 38 +- crates/proc-macro-srv/src/proc_macros.rs | 39 +- crates/proc-macro-srv/src/server.rs | 47 ++- .../proc-macro-srv/src/server/token_stream.rs | 64 +-- crates/proc-macro-srv/src/tests/utils.rs | 25 +- .../src/integrated_benchmarks.rs | 2 +- 24 files changed, 787 insertions(+), 493 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 775231f3ea2e9..c71ca2f212604 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1255,6 +1255,9 @@ checksum = "e0a7ae3ac2f1173085d398531c705756c94a4c56843785df85a60c1a0afac116" name = "proc-macro-api" version = "0.0.0" dependencies = [ + "base-db", + "indexmap", + "la-arena 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", "memmap2", "object 0.32.0", "paths", diff --git a/crates/base-db/src/fixture.rs b/crates/base-db/src/fixture.rs index 7236b56f6d47d..cfba01a032984 100644 --- a/crates/base-db/src/fixture.rs +++ b/crates/base-db/src/fixture.rs @@ -543,6 +543,9 @@ impl ProcMacroExpander for IdentityProcMacroExpander { subtree: &Subtree, _: Option<&Subtree>, _: &Env, + _: SpanData, + _: SpanData, + _: SpanData, ) -> Result, ProcMacroExpansionError> { Ok(subtree.clone()) } @@ -557,6 +560,9 @@ impl ProcMacroExpander for AttributeInputReplaceProcMacroExpander { _: &Subtree, attrs: Option<&Subtree>, _: &Env, + _: SpanData, + _: SpanData, + _: SpanData, ) -> Result, ProcMacroExpansionError> { attrs .cloned() @@ -572,6 +578,9 @@ impl ProcMacroExpander for MirrorProcMacroExpander { input: &Subtree, _: Option<&Subtree>, _: &Env, + _: SpanData, + _: SpanData, + _: SpanData, ) -> Result, ProcMacroExpansionError> { fn traverse(input: &Subtree) -> Subtree { let mut token_trees = vec![]; @@ -599,6 +608,9 @@ impl ProcMacroExpander for ShortenProcMacroExpander { input: &Subtree, _: Option<&Subtree>, _: &Env, + _: SpanData, + _: SpanData, + _: SpanData, ) -> Result, ProcMacroExpansionError> { return Ok(traverse(input)); diff --git a/crates/base-db/src/input.rs b/crates/base-db/src/input.rs index 0b04a91f626b1..12b449932dd73 100644 --- a/crates/base-db/src/input.rs +++ b/crates/base-db/src/input.rs @@ -262,6 +262,9 @@ pub trait 
ProcMacroExpander: fmt::Debug + Send + Sync + RefUnwindSafe { subtree: &tt::Subtree, attrs: Option<&tt::Subtree>, env: &Env, + def_site: SpanData, + call_site: SpanData, + mixed_site: SpanData, ) -> Result, ProcMacroExpansionError>; } diff --git a/crates/base-db/src/span.rs b/crates/base-db/src/span.rs index 607b8027ca952..d2b40ecdd2143 100644 --- a/crates/base-db/src/span.rs +++ b/crates/base-db/src/span.rs @@ -38,6 +38,8 @@ impl SyntaxContextId { // currently (which kind of makes sense but we need it here!) pub const SELF_REF: Self = SyntaxContextId(unsafe { InternId::new_unchecked(InternId::MAX - 1) }); + /// Used syntax fixups + pub const FAKE: Self = SyntaxContextId(unsafe { InternId::new_unchecked(InternId::MAX - 2) }); pub fn is_root(self) -> bool { self == Self::ROOT diff --git a/crates/hir-def/src/macro_expansion_tests/mod.rs b/crates/hir-def/src/macro_expansion_tests/mod.rs index 355b82a5f42c1..bcbf4047caa24 100644 --- a/crates/hir-def/src/macro_expansion_tests/mod.rs +++ b/crates/hir-def/src/macro_expansion_tests/mod.rs @@ -16,7 +16,7 @@ mod proc_macros; use std::{iter, ops::Range, sync}; -use base_db::{fixture::WithFixture, ProcMacro, SourceDatabase}; +use base_db::{fixture::WithFixture, span::SpanData, ProcMacro, SourceDatabase}; use expect_test::Expect; use hir_expand::{db::ExpandDatabase, span::SpanMapRef, HirFileIdExt, InFile, MacroFileId}; use stdx::format_to; @@ -307,6 +307,9 @@ impl base_db::ProcMacroExpander for IdentityWhenValidProcMacroExpander { subtree: &Subtree, _: Option<&Subtree>, _: &base_db::Env, + _: SpanData, + _: SpanData, + _: SpanData, ) -> Result { let (parse, _) = ::mbe::token_tree_to_syntax_node(subtree, ::mbe::TopEntryPoint::MacroItems); diff --git a/crates/hir-expand/src/db.rs b/crates/hir-expand/src/db.rs index 601a754abbd43..1d55aaf1703f8 100644 --- a/crates/hir-expand/src/db.rs +++ b/crates/hir-expand/src/db.rs @@ -233,7 +233,17 @@ pub fn expand_speculative( let speculative_expansion = match loc.def.kind { MacroDefKind::ProcMacro(expander, ..) => { tt.delimiter = tt::Delimiter::UNSPECIFIED; - expander.expand(db, loc.def.krate, loc.krate, &tt, attr_arg.as_ref()) + let call_site = loc.span(db); + expander.expand( + db, + loc.def.krate, + loc.krate, + &tt, + attr_arg.as_ref(), + call_site, + call_site, + call_site, + ) } MacroDefKind::BuiltInAttr(BuiltinAttrExpander::Derive, _) => { pseudo_derive_attr_expansion(&tt, attr_arg.as_ref()?) @@ -398,17 +408,23 @@ fn macro_arg( MacroCallKind::Attr { ast_id, .. } => ast_id.to_ptr(db).to_node(&root).syntax().clone(), }; let censor = censor_for_macro_input(&loc, &syntax); - // let mut fixups = fixup::fixup_syntax(&node); - // fixups.replace.extend(censor.into_iter().map(|node| (node.into(), Vec::new()))); - // let (mut tt, tmap, _) = mbe::syntax_node_to_token_tree_with_modifications( - // &node, - // fixups.token_map, - // fixups.next_id, - // fixups.replace, - // fixups.append, - // ); - - let mut tt = mbe::syntax_node_to_token_tree_censored(&syntax, map.as_ref(), censor); + let mut tt = match loc.kind { + MacroCallKind::FnLike { .. } => { + mbe::syntax_node_to_token_tree_censored(&syntax, map.as_ref(), censor) + } + MacroCallKind::Derive { .. } | MacroCallKind::Attr { .. 
} => { + // let mut fixups = crate::fixup::fixup_syntax(&syntax); + // fixups.replace.extend(censor.into_iter().map(|node| (node.into(), Vec::new()))); + // let (mut tt, tmap, _) = mbe::syntax_node_to_token_tree_with_modifications( + // &node, + // fixups.token_map, + // fixups.next_id, + // fixups.replace, + // fixups.append, + // ); + mbe::syntax_node_to_token_tree_censored(&syntax, map.as_ref(), censor) + } + }; if loc.def.is_proc_macro() { // proc macros expect their inputs without parentheses, MBEs expect it with them included @@ -658,8 +674,19 @@ fn expand_proc_macro(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult None, }; - let ExpandResult { value: tt, err } = - expander.expand(db, loc.def.krate, loc.krate, ¯o_arg, attr_arg); + let call_site = loc.span(db); + let ExpandResult { value: tt, err } = expander.expand( + db, + loc.def.krate, + loc.krate, + ¯o_arg, + attr_arg, + // FIXME + call_site, + call_site, + // FIXME + call_site, + ); // Set a hard limit for the expanded tt if let Err(value) = check_tt_count(&tt) { diff --git a/crates/hir-expand/src/fixup.rs b/crates/hir-expand/src/fixup.rs index e6e8d8c029922..326ff1dec484d 100644 --- a/crates/hir-expand/src/fixup.rs +++ b/crates/hir-expand/src/fixup.rs @@ -2,25 +2,30 @@ //! fix up syntax errors in the code we're passing to them. use std::mem; -use mbe::{SyntheticToken, SyntheticTokenId, TokenMap}; +use base_db::{ + span::{ErasedFileAstId, SpanAnchor, SpanData, SyntaxContextId}, + FileId, +}; +use la_arena::RawIdx; +use mbe::TokenMap; use rustc_hash::FxHashMap; use smallvec::SmallVec; use syntax::{ ast::{self, AstNode, HasLoopBody}, - match_ast, SyntaxElement, SyntaxKind, SyntaxNode, TextRange, + match_ast, SyntaxElement, SyntaxKind, SyntaxNode, TextRange, TextSize, }; -use tt::token_id::Subtree; +use tt::Spacing; + +use crate::tt::{Ident, Leaf, Punct, Subtree}; /// The result of calculating fixes for a syntax node -- a bunch of changes /// (appending to and replacing nodes), the information that is needed to /// reverse those changes afterwards, and a token map. #[derive(Debug, Default)] pub(crate) struct SyntaxFixups { - pub(crate) append: FxHashMap>, - pub(crate) replace: FxHashMap>, + pub(crate) append: FxHashMap>, + pub(crate) replace: FxHashMap>, pub(crate) undo_info: SyntaxFixupUndoInfo, - pub(crate) token_map: TokenMap, - pub(crate) next_id: u32, } /// This is the information needed to reverse the fixups. 
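// ---------------------------------------------------------------------------
// Editor's sketch (not part of the patch): the hunks below drop the
// `SyntheticTokenId` bookkeeping and instead tag every token synthesized by
// the fixup pass with the sentinel `SyntaxContextId::FAKE` introduced above,
// so reversing the fixups reduces to filtering those tokens out again. A
// simplified model with stand-in types (`Ctx` and `Leaf` are illustrative
// only, not the crate's real definitions):
#[derive(Clone, Copy, PartialEq)]
struct Ctx(u32);
const FAKE: Ctx = Ctx(u32::MAX - 2);

struct Leaf {
    text: &'static str,
    ctx: Ctx,
}

fn reverse_fixups(tokens: Vec<Leaf>) -> Vec<Leaf> {
    // Real tokens survive; everything the fixup pass appended is dropped.
    tokens.into_iter().filter(|t| t.ctx != FAKE).collect()
}

fn main() {
    let real = Ctx(0);
    let fixed_up = vec![
        Leaf { text: "some_expr", ctx: real },
        Leaf { text: ".", ctx: real },
        Leaf { text: "__ra_fixup", ctx: FAKE }, // appended for `some_expr.|`
    ];
    let restored: Vec<&str> = reverse_fixups(fixed_up).iter().map(|t| t.text).collect();
    assert_eq!(restored, ["some_expr", "."]);
}
// ---------------------------------------------------------------------------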
@@ -29,21 +34,25 @@ pub struct SyntaxFixupUndoInfo { original: Box<[Subtree]>, } -const EMPTY_ID: SyntheticTokenId = SyntheticTokenId(!0); +// censoring -> just don't convert the node +// replacement -> censor + append +// append -> insert a fake node, here we need to assemble some dummy span that we can figure out how +// to remove later pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups { let mut append = FxHashMap::<SyntaxElement, _>::default(); let mut replace = FxHashMap::<SyntaxElement, _>::default(); let mut preorder = node.preorder(); let mut original = Vec::new(); - let mut token_map = TokenMap::default(); - let mut next_id = 0; + let dummy_range = TextRange::empty(TextSize::new(0)); + let dummy_anchor = + SpanAnchor { file_id: FileId(!0), ast_id: ErasedFileAstId::from_raw(RawIdx::from(0)) }; + let fake_span = + SpanData { range: dummy_range, anchor: dummy_anchor, ctx: SyntaxContextId::FAKE }; while let Some(event) = preorder.next() { - let node = match event { - syntax::WalkEvent::Enter(node) => node, - syntax::WalkEvent::Leave(_) => continue, - }; + let syntax::WalkEvent::Enter(node) = event else { continue }; + /* TODO if can_handle_error(&node) && has_error_to_handle(&node) { // the node contains an error node, we have to completely replace it by something valid let (original_tree, new_tmap, new_next_id) = @@ -68,6 +77,7 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups { preorder.skip_subtree(); continue; } + */ // In some other situations, we can fix things by just appending some tokens. let end_range = TextRange::empty(node.text_range().end()); match_ast! { @@ -76,36 +86,32 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups { if it.name_ref().is_none() { // incomplete field access: some_expr.| append.insert(node.clone().into(), vec![ - SyntheticToken { - kind: SyntaxKind::IDENT, + Leaf::Ident(Ident { text: "__ra_fixup".into(), - range: end_range, - id: EMPTY_ID, - }, + span: fake_span + }), ]); } }, ast::ExprStmt(it) => { if it.semicolon_token().is_none() { append.insert(node.clone().into(), vec![ - SyntheticToken { - kind: SyntaxKind::SEMICOLON, - text: ";".into(), - range: end_range, - id: EMPTY_ID, - }, + Leaf::Punct(Punct { + char: ';', + spacing: Spacing::Alone, + span: fake_span + }), ]); } }, ast::LetStmt(it) => { if it.semicolon_token().is_none() { append.insert(node.clone().into(), vec![ - SyntheticToken { - kind: SyntaxKind::SEMICOLON, - text: ";".into(), - range: end_range, - id: EMPTY_ID, - }, + Leaf::Punct(Punct { + char: ';', + spacing: Spacing::Alone, + span: fake_span + }), ]); } }, @@ -117,28 +123,25 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups { None => continue, }; append.insert(if_token.into(), vec![ - SyntheticToken { - kind: SyntaxKind::IDENT, + Leaf::Ident(Ident { text: "__ra_fixup".into(), - range: end_range, - id: EMPTY_ID, - }, + span: fake_span + }), ]); } if it.then_branch().is_none() { append.insert(node.clone().into(), vec![ - SyntheticToken { - kind: SyntaxKind::L_CURLY, - text: "{".into(), - range: end_range, - id: EMPTY_ID, - }, - SyntheticToken { - kind: SyntaxKind::R_CURLY, - text: "}".into(), - range: end_range, - id: EMPTY_ID, - }, + // FIXME: This should be a subtree no? 
+ Leaf::Punct(Punct { + char: '{', + spacing: Spacing::Alone, + span: fake_span + }), + Leaf::Punct(Punct { + char: '}', + spacing: Spacing::Alone, + span: fake_span + }), ]); } }, @@ -150,46 +153,42 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups { None => continue, }; append.insert(while_token.into(), vec![ - SyntheticToken { - kind: SyntaxKind::IDENT, + Leaf::Ident(Ident { text: "__ra_fixup".into(), - range: end_range, - id: EMPTY_ID, - }, + span: fake_span + }), ]); } if it.loop_body().is_none() { append.insert(node.clone().into(), vec![ - SyntheticToken { - kind: SyntaxKind::L_CURLY, - text: "{".into(), - range: end_range, - id: EMPTY_ID, - }, - SyntheticToken { - kind: SyntaxKind::R_CURLY, - text: "}".into(), - range: end_range, - id: EMPTY_ID, - }, + // FIXME: This should be a subtree no? + Leaf::Punct(Punct { + char: '{', + spacing: Spacing::Alone, + span: fake_span + }), + Leaf::Punct(Punct { + char: '}', + spacing: Spacing::Alone, + span: fake_span + }), ]); } }, ast::LoopExpr(it) => { if it.loop_body().is_none() { append.insert(node.clone().into(), vec![ - SyntheticToken { - kind: SyntaxKind::L_CURLY, - text: "{".into(), - range: end_range, - id: EMPTY_ID, - }, - SyntheticToken { - kind: SyntaxKind::R_CURLY, - text: "}".into(), - range: end_range, - id: EMPTY_ID, - }, + // FIXME: This should be a subtree no? + Leaf::Punct(Punct { + char: '{', + spacing: Spacing::Alone, + span: fake_span + }), + Leaf::Punct(Punct { + char: '}', + spacing: Spacing::Alone, + span: fake_span + }), ]); } }, @@ -201,29 +200,26 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups { None => continue }; append.insert(match_token.into(), vec![ - SyntheticToken { - kind: SyntaxKind::IDENT, + Leaf::Ident(Ident { text: "__ra_fixup".into(), - range: end_range, - id: EMPTY_ID - }, + span: fake_span + }), ]); } if it.match_arm_list().is_none() { // No match arms append.insert(node.clone().into(), vec![ - SyntheticToken { - kind: SyntaxKind::L_CURLY, - text: "{".into(), - range: end_range, - id: EMPTY_ID, - }, - SyntheticToken { - kind: SyntaxKind::R_CURLY, - text: "}".into(), - range: end_range, - id: EMPTY_ID, - }, + // FIXME: This should be a subtree no? + Leaf::Punct(Punct { + char: '{', + spacing: Spacing::Alone, + span: fake_span + }), + Leaf::Punct(Punct { + char: '}', + spacing: Spacing::Alone, + span: fake_span + }), ]); } }, @@ -234,10 +230,15 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups { }; let [pat, in_token, iter] = [ - (SyntaxKind::UNDERSCORE, "_"), - (SyntaxKind::IN_KW, "in"), - (SyntaxKind::IDENT, "__ra_fixup") - ].map(|(kind, text)| SyntheticToken { kind, text: text.into(), range: end_range, id: EMPTY_ID}); + "_", + "in", + "__ra_fixup" + ].map(|text| + Leaf::Ident(Ident { + text: text.into(), + span: fake_span + }), + ); if it.pat().is_none() && it.in_token().is_none() && it.iterable().is_none() { append.insert(for_token.into(), vec![pat, in_token, iter]); @@ -248,18 +249,17 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups { if it.loop_body().is_none() { append.insert(node.clone().into(), vec![ - SyntheticToken { - kind: SyntaxKind::L_CURLY, - text: "{".into(), - range: end_range, - id: EMPTY_ID, - }, - SyntheticToken { - kind: SyntaxKind::R_CURLY, - text: "}".into(), - range: end_range, - id: EMPTY_ID, - }, + // FIXME: This should be a subtree no? 
+ Leaf::Punct(Punct { + char: '{', + spacing: Spacing::Alone, + span: fake_span + }), + Leaf::Punct(Punct { + char: '}', + spacing: Spacing::Alone, + span: fake_span + }), ]); } }, @@ -270,8 +270,6 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups { SyntaxFixups { append, replace, - token_map, - next_id, undo_info: SyntaxFixupUndoInfo { original: original.into_boxed_slice() }, } } @@ -288,40 +286,33 @@ fn has_error_to_handle(node: &SyntaxNode) -> bool { has_error(node) || node.children().any(|c| !can_handle_error(&c) && has_error_to_handle(&c)) } -pub(crate) fn reverse_fixups( - tt: &mut Subtree, - token_map: &TokenMap, - undo_info: &SyntaxFixupUndoInfo, -) { +pub(crate) fn reverse_fixups(tt: &mut Subtree, undo_info: &SyntaxFixupUndoInfo) { let tts = std::mem::take(&mut tt.token_trees); tt.token_trees = tts .into_iter() + // delete all fake nodes .filter(|tt| match tt { - tt::TokenTree::Leaf(leaf) => { - token_map.synthetic_token_id(*leaf.span()) != Some(EMPTY_ID) - } - tt::TokenTree::Subtree(st) => { - token_map.synthetic_token_id(st.delimiter.open) != Some(EMPTY_ID) - } - }) - .flat_map(|tt| match tt { - tt::TokenTree::Subtree(mut tt) => { - reverse_fixups(&mut tt, token_map, undo_info); - SmallVec::from_const([tt.into()]) - } - tt::TokenTree::Leaf(leaf) => { - if let Some(id) = token_map.synthetic_token_id(*leaf.span()) { - let original = undo_info.original[id.0 as usize].clone(); - if original.delimiter.kind == tt::DelimiterKind::Invisible { - original.token_trees.into() - } else { - SmallVec::from_const([original.into()]) - } - } else { - SmallVec::from_const([leaf.into()]) - } - } + tt::TokenTree::Leaf(leaf) => leaf.span().ctx != SyntaxContextId::FAKE, + tt::TokenTree::Subtree(st) => st.delimiter.open.ctx != SyntaxContextId::FAKE, }) + // .flat_map(|tt| match tt { TODO + // tt::TokenTree::Subtree(mut tt) => { + // reverse_fixups(&mut tt, undo_info); + // SmallVec::from_const([tt.into()]) + // } + // tt::TokenTree::Leaf(leaf) => { + // if let Some(id) = leaf.span().anchor { + // let original = undo_info.original[id.0 as usize].clone(); + // if original.delimiter.kind == tt::DelimiterKind::Invisible { + // original.token_trees.into() + // } else { + // SmallVec::from_const([original.into()]) + // } + // } else { + // SmallVec::from_const([leaf.into()]) + // } + // } + // }) .collect(); } diff --git a/crates/hir-expand/src/proc_macro.rs b/crates/hir-expand/src/proc_macro.rs index 41675c630dcfe..04b5b7b0b6a3e 100644 --- a/crates/hir-expand/src/proc_macro.rs +++ b/crates/hir-expand/src/proc_macro.rs @@ -1,6 +1,6 @@ //! Proc Macro Expander stub -use base_db::{CrateId, ProcMacroExpansionError, ProcMacroId, ProcMacroKind}; +use base_db::{span::SpanData, CrateId, ProcMacroExpansionError, ProcMacroId, ProcMacroKind}; use stdx::never; use crate::{db::ExpandDatabase, tt, ExpandError, ExpandResult}; @@ -33,6 +33,9 @@ impl ProcMacroExpander { calling_crate: CrateId, tt: &tt::Subtree, attr_arg: Option<&tt::Subtree>, + def_site: SpanData, + call_site: SpanData, + mixed_site: SpanData, ) -> ExpandResult { match self.proc_macro_id { ProcMacroId(DUMMY_ID) => { @@ -68,7 +71,8 @@ impl ProcMacroExpander { let krate_graph = db.crate_graph(); // Proc macros have access to the environment variables of the invoking crate. 
let env = &krate_graph[calling_crate].env; - match proc_macro.expander.expand(tt, attr_arg, env) { + match proc_macro.expander.expand(tt, attr_arg, env, def_site, call_site, mixed_site) + { Ok(t) => ExpandResult::ok(t), Err(err) => match err { // Don't discard the item in case something unexpected happened while expanding attributes diff --git a/crates/load-cargo/src/lib.rs b/crates/load-cargo/src/lib.rs index 4d1319094933f..ed4175c458301 100644 --- a/crates/load-cargo/src/lib.rs +++ b/crates/load-cargo/src/lib.rs @@ -376,16 +376,16 @@ impl ProcMacroExpander for Expander { subtree: &tt::Subtree, attrs: Option<&tt::Subtree>, env: &Env, + def_site: SpanData, + call_site: SpanData, + mixed_site: SpanData, ) -> Result, ProcMacroExpansionError> { - let _ = (subtree, attrs, env); - - // let env = env.iter().map(|(k, v)| (k.to_string(), v.to_string())).collect(); - // match self.0.expand(subtree, attrs, env) { - // Ok(Ok(subtree)) => Ok(subtree), - // Ok(Err(err)) => Err(ProcMacroExpansionError::Panic(err.0)), - // Err(err) => Err(ProcMacroExpansionError::System(err.to_string())), - // } - todo!() + let env = env.iter().map(|(k, v)| (k.to_string(), v.to_string())).collect(); + match self.0.expand(subtree, attrs, env, def_site, call_site, mixed_site) { + Ok(Ok(subtree)) => Ok(subtree), + Ok(Err(err)) => Err(ProcMacroExpansionError::Panic(err.0)), + Err(err) => Err(ProcMacroExpansionError::System(err.to_string())), + } } } @@ -399,6 +399,9 @@ impl ProcMacroExpander for IdentityExpander { subtree: &tt::Subtree, _: Option<&tt::Subtree>, _: &Env, + _: SpanData, + _: SpanData, + _: SpanData, ) -> Result, ProcMacroExpansionError> { Ok(subtree.clone()) } @@ -414,6 +417,9 @@ impl ProcMacroExpander for EmptyExpander { _: &tt::Subtree, _: Option<&tt::Subtree>, _: &Env, + _: SpanData, + _: SpanData, + _: SpanData, ) -> Result, ProcMacroExpansionError> { Ok(tt::Subtree::empty()) } diff --git a/crates/mbe/src/expander/transcriber.rs b/crates/mbe/src/expander/transcriber.rs index c8b326fa6c535..8aedd73140635 100644 --- a/crates/mbe/src/expander/transcriber.rs +++ b/crates/mbe/src/expander/transcriber.rs @@ -51,7 +51,7 @@ impl Bindings { marker(&mut span); let subtree = tt::Subtree { delimiter: tt::Delimiter { - // TODO split span + // FIXME split span open: span, close: span, kind: delimiter.kind, diff --git a/crates/mbe/src/lib.rs b/crates/mbe/src/lib.rs index 1b13b39f0177b..fdf97ad538cbd 100644 --- a/crates/mbe/src/lib.rs +++ b/crates/mbe/src/lib.rs @@ -34,7 +34,8 @@ pub use tt::{Delimiter, DelimiterKind, Punct, SyntaxContext}; pub use crate::{ syntax_bridge::{ - map_from_syntax_node, parse_exprs_with_sep, parse_to_token_tree, syntax_node_to_token_tree, + map_from_syntax_node, parse_exprs_with_sep, parse_to_token_tree, + parse_to_token_tree_static_span, syntax_node_to_token_tree, syntax_node_to_token_tree_censored, token_tree_to_syntax_node, SpanMapper, }, token_map::TokenMap, diff --git a/crates/mbe/src/syntax_bridge.rs b/crates/mbe/src/syntax_bridge.rs index 5d802ba86c0c2..c61c52628607e 100644 --- a/crates/mbe/src/syntax_bridge.rs +++ b/crates/mbe/src/syntax_bridge.rs @@ -63,7 +63,7 @@ pub(crate) mod dummy_test_span_utils { /// Convert the syntax node to a `TokenTree` (what macro /// will consume). 
-/// TODO: Flesh out the doc comment more thoroughly +/// FIXME: Flesh out the doc comment more thoroughly pub fn syntax_node_to_token_tree( node: &SyntaxNode, map: SpanMap, @@ -179,6 +179,19 @@ where Some(convert_tokens(&mut conv)) } +/// Convert a string to a `TokenTree` +pub fn parse_to_token_tree_static_span(span: S, text: &str) -> Option> +where + S: Span, +{ + let lexed = parser::LexedStr::new(text); + if lexed.errors().next().is_some() { + return None; + } + let mut conv = StaticRawConverter { lexed, pos: 0, span }; + Some(convert_tokens(&mut conv)) +} + /// Split token tree with separate expr: $($e:expr)SEP* pub fn parse_exprs_with_sep(tt: &tt::Subtree, sep: char) -> Vec> { if tt.token_trees.is_empty() { @@ -213,12 +226,10 @@ pub fn parse_exprs_with_sep(tt: &tt::Subtree, sep: char) -> Vec(conv: &mut C) -> tt::Subtree> +fn convert_tokens(conv: &mut C) -> tt::Subtree where - C: TokenConverter, - Ctx: SyntaxContext, - SpanData: Span, - Anchor: Copy, + C: TokenConverter, + S: Span, { let entry = tt::Subtree { delimiter: tt::Delimiter::UNSPECIFIED, token_trees: vec![] }; let mut stack = NonEmptyVec::new(entry); @@ -452,6 +463,12 @@ struct RawConverter<'a, Anchor> { pos: usize, anchor: Anchor, } +/// A raw token (straight from lexer) converter that gives every token the same span. +struct StaticRawConverter<'a, S> { + lexed: parser::LexedStr<'a>, + pos: usize, + span: S, +} trait SrcToken: std::fmt::Debug { fn kind(&self, ctx: &Ctx) -> SyntaxKind; @@ -461,20 +478,16 @@ trait SrcToken: std::fmt::Debug { fn to_text(&self, ctx: &Ctx) -> SmolStr; } -trait TokenConverter: Sized { +trait TokenConverter: Sized { type Token: SrcToken; - fn convert_doc_comment( - &self, - token: &Self::Token, - span: SpanData, - ) -> Option>>>; + fn convert_doc_comment(&self, token: &Self::Token, span: S) -> Option>>; fn bump(&mut self) -> Option<(Self::Token, TextRange)>; fn peek(&self) -> Option; - fn span_for(&self, range: TextRange) -> SpanData; + fn span_for(&self, range: TextRange) -> S; } impl SrcToken> for usize { @@ -491,7 +504,22 @@ impl SrcToken> for usize { } } -impl TokenConverter for RawConverter<'_, Anchor> +impl SrcToken> for usize { + fn kind(&self, ctx: &StaticRawConverter<'_, S>) -> SyntaxKind { + ctx.lexed.kind(*self) + } + + fn to_char(&self, ctx: &StaticRawConverter<'_, S>) -> Option { + ctx.lexed.text(*self).chars().next() + } + + fn to_text(&self, ctx: &StaticRawConverter<'_, S>) -> SmolStr { + ctx.lexed.text(*self).into() + } +} + +impl TokenConverter> + for RawConverter<'_, Anchor> where SpanData: Span, { @@ -530,6 +558,41 @@ where } } +impl TokenConverter for StaticRawConverter<'_, S> +where + S: Span, +{ + type Token = usize; + + fn convert_doc_comment(&self, &token: &usize, span: S) -> Option>> { + let text = self.lexed.text(token); + convert_doc_comment(&doc_comment(text), span) + } + + fn bump(&mut self) -> Option<(Self::Token, TextRange)> { + if self.pos == self.lexed.len() { + return None; + } + let token = self.pos; + self.pos += 1; + let range = self.lexed.text_range(token); + let range = TextRange::new(range.start.try_into().ok()?, range.end.try_into().ok()?); + + Some((token, range)) + } + + fn peek(&self) -> Option { + if self.pos == self.lexed.len() { + return None; + } + Some(self.pos) + } + + fn span_for(&self, _: TextRange) -> S { + self.span + } +} + struct Converter { current: Option, preorder: PreorderWithTokens, @@ -596,17 +659,13 @@ impl SrcToken> for SynToken { } } -impl TokenConverter for Converter +impl TokenConverter for Converter where - SpanData: Span, - 
SpanMap: SpanMapper>, + S: Span, + SpanMap: SpanMapper, { type Token = SynToken; - fn convert_doc_comment( - &self, - token: &Self::Token, - span: SpanData, - ) -> Option>>> { + fn convert_doc_comment(&self, token: &Self::Token, span: S) -> Option>> { convert_doc_comment(token.token(), span) } @@ -661,7 +720,7 @@ where Some(token) } - fn span_for(&self, range: TextRange) -> SpanData { + fn span_for(&self, range: TextRange) -> S { self.map.span_for(range) } } diff --git a/crates/proc-macro-api/Cargo.toml b/crates/proc-macro-api/Cargo.toml index 4c87c89adde97..2cbbc9489a294 100644 --- a/crates/proc-macro-api/Cargo.toml +++ b/crates/proc-macro-api/Cargo.toml @@ -25,6 +25,7 @@ tracing.workspace = true triomphe.workspace = true memmap2 = "0.5.4" snap = "1.1.0" +indexmap = "2.1.0" # local deps paths.workspace = true @@ -32,5 +33,7 @@ tt.workspace = true stdx.workspace = true profile.workspace = true text-size.workspace = true -# Intentionally *not* depend on anything salsa-related -# base-db.workspace = true +# Ideally this crate would not depend on salsa things, but we need span information here which wraps +# InternIds for the syntax context +base-db.workspace = true +la-arena.workspace = true diff --git a/crates/proc-macro-api/src/lib.rs b/crates/proc-macro-api/src/lib.rs index 7a3580f814e27..9fc5a53d935f8 100644 --- a/crates/proc-macro-api/src/lib.rs +++ b/crates/proc-macro-api/src/lib.rs @@ -11,6 +11,8 @@ pub mod msg; mod process; mod version; +use base_db::span::SpanData; +use indexmap::IndexSet; use paths::AbsPathBuf; use std::{fmt, io, sync::Mutex}; use triomphe::Arc; @@ -18,7 +20,7 @@ use triomphe::Arc; use serde::{Deserialize, Serialize}; use crate::{ - msg::{flat::SerializableSpan, ExpandMacro, FlatTree, PanicMessage}, + msg::{ExpandMacro, ExpnGlobals, FlatTree, PanicMessage, HAS_GLOBAL_SPANS}, process::ProcMacroProcessSrv, }; @@ -132,32 +134,49 @@ impl ProcMacro { self.kind } - pub fn expand>( + pub fn expand( &self, - subtree: &tt::Subtree, - attr: Option<&tt::Subtree>, + subtree: &tt::Subtree, + attr: Option<&tt::Subtree>, env: Vec<(String, String)>, - ) -> Result, PanicMessage>, ServerError> { + def_site: SpanData, + call_site: SpanData, + mixed_site: SpanData, + ) -> Result, PanicMessage>, ServerError> { let version = self.process.lock().unwrap_or_else(|e| e.into_inner()).version(); let current_dir = env .iter() .find(|(name, _)| name == "CARGO_MANIFEST_DIR") .map(|(_, value)| value.clone()); + let mut span_data_table = IndexSet::default(); + let def_site = span_data_table.insert_full(def_site).0; + let call_site = span_data_table.insert_full(call_site).0; + let mixed_site = span_data_table.insert_full(mixed_site).0; let task = ExpandMacro { - macro_body: FlatTree::new(subtree, version), + macro_body: FlatTree::new(subtree, version, &mut span_data_table), macro_name: self.name.to_string(), - attributes: attr.map(|subtree| FlatTree::new(subtree, version)), + attributes: attr.map(|subtree| FlatTree::new(subtree, version, &mut span_data_table)), lib: self.dylib_path.to_path_buf().into(), env, current_dir, + has_global_spans: ExpnGlobals { + serialize: version >= HAS_GLOBAL_SPANS, + def_site, + call_site, + mixed_site, + }, }; - let request = msg::Request::ExpandMacro(task); - let response = self.process.lock().unwrap_or_else(|e| e.into_inner()).send_task(request)?; + let response = self + .process + .lock() + .unwrap_or_else(|e| e.into_inner()) + .send_task(msg::Request::ExpandMacro(task))?; + match response { msg::Response::ExpandMacro(it) => { - Ok(it.map(|tree| 
FlatTree::to_subtree(tree, version))) + Ok(it.map(|tree| FlatTree::to_subtree_resolved(tree, version, &span_data_table))) } msg::Response::ListMacros(..) | msg::Response::ApiVersionCheck(..) => { Err(ServerError { message: "unexpected response".to_string(), io: None }) diff --git a/crates/proc-macro-api/src/msg.rs b/crates/proc-macro-api/src/msg.rs index 4e6984f61b727..ddac514ff7094 100644 --- a/crates/proc-macro-api/src/msg.rs +++ b/crates/proc-macro-api/src/msg.rs @@ -10,21 +10,15 @@ use serde::{de::DeserializeOwned, Deserialize, Serialize}; use crate::ProcMacroKind; -pub use crate::msg::flat::FlatTree; +pub use crate::msg::flat::{FlatTree, TokenId}; // The versions of the server protocol pub const NO_VERSION_CHECK_VERSION: u32 = 0; pub const VERSION_CHECK_VERSION: u32 = 1; pub const ENCODE_CLOSE_SPAN_VERSION: u32 = 2; -/// This version changes how spans are encoded, kind of. Prior to this version, -/// spans were represented as a single u32 which effectively forced spans to be -/// token ids. Starting with this version, the span fields are still u32, -/// but if the size of the span is greater than 1 then the span data is encoded in -/// an additional vector where the span represents the offset into that vector. -/// This allows encoding bigger spans while supporting the previous versions. -pub const VARIABLE_SIZED_SPANS: u32 = 2; +pub const HAS_GLOBAL_SPANS: u32 = 3; -pub const CURRENT_API_VERSION: u32 = VARIABLE_SIZED_SPANS; +pub const CURRENT_API_VERSION: u32 = HAS_GLOBAL_SPANS; #[derive(Debug, Serialize, Deserialize)] pub enum Request { @@ -66,6 +60,24 @@ pub struct ExpandMacro { pub env: Vec<(String, String)>, pub current_dir: Option, + /// marker for serde skip stuff + #[serde(skip_serializing_if = "ExpnGlobals::skip_serializing_if")] + pub has_global_spans: ExpnGlobals, +} + +#[derive(Debug, Serialize, Deserialize)] +pub struct ExpnGlobals { + #[serde(skip_serializing)] + pub serialize: bool, + pub def_site: usize, + pub call_site: usize, + pub mixed_site: usize, +} + +impl ExpnGlobals { + fn skip_serializing_if(&self) -> bool { + !self.serialize + } } pub trait Message: Serialize + DeserializeOwned { @@ -120,38 +132,89 @@ fn write_json(out: &mut impl Write, msg: &str) -> io::Result<()> { Ok(()) } -/*TODO - #[cfg(test)] mod tests { - use tt::{ - Delimiter, DelimiterKind, Ident, Leaf, Literal, Punct, Spacing, SpanAnchor, Subtree, - TokenId, TokenTree, + use base_db::{ + span::{ErasedFileAstId, SpanAnchor, SpanData, SyntaxContextId}, + FileId, }; + use la_arena::RawIdx; + use text_size::{TextRange, TextSize}; + use tt::{Delimiter, DelimiterKind, Ident, Leaf, Literal, Punct, Spacing, Subtree, TokenTree}; use super::*; - fn fixture_token_tree() -> Subtree { - let mut subtree = Subtree { delimiter: Delimiter::unspecified(), token_trees: Vec::new() }; - subtree - .token_trees - .push(TokenTree::Leaf(Ident { text: "struct".into(), span: TokenId(0) }.into())); - subtree - .token_trees - .push(TokenTree::Leaf(Ident { text: "Foo".into(), span: TokenId(1) }.into())); + fn fixture_token_tree() -> Subtree { + let anchor = + SpanAnchor { file_id: FileId(0), ast_id: ErasedFileAstId::from_raw(RawIdx::from(0)) }; + let mut subtree = Subtree { + delimiter: Delimiter { + open: SpanData { + range: TextRange::empty(TextSize::new(0)), + anchor, + ctx: SyntaxContextId::ROOT, + }, + close: SpanData { + range: TextRange::empty(TextSize::new(13)), + anchor, + ctx: SyntaxContextId::ROOT, + }, + kind: DelimiterKind::Invisible, + }, + token_trees: Vec::new(), + }; + subtree.token_trees.push(TokenTree::Leaf( 
+ Ident { + text: "struct".into(), + span: SpanData { + range: TextRange::at(TextSize::new(0), TextSize::of("struct")), + anchor, + ctx: SyntaxContextId::ROOT, + }, + } + .into(), + )); + subtree.token_trees.push(TokenTree::Leaf( + Ident { + text: "Foo".into(), + span: SpanData { + range: TextRange::at(TextSize::new(5), TextSize::of("Foo")), + anchor, + ctx: SyntaxContextId::ROOT, + }, + } + .into(), + )); subtree.token_trees.push(TokenTree::Leaf(Leaf::Literal(Literal { text: "Foo".into(), - span: TokenId::DUMMY, + + span: SpanData { + range: TextRange::at(TextSize::new(8), TextSize::of("Foo")), + anchor, + ctx: SyntaxContextId::ROOT, + }, }))); subtree.token_trees.push(TokenTree::Leaf(Leaf::Punct(Punct { char: '@', - span: TokenId::DUMMY, + span: SpanData { + range: TextRange::at(TextSize::new(11), TextSize::of('@')), + anchor, + ctx: SyntaxContextId::ROOT, + }, spacing: Spacing::Joint, }))); subtree.token_trees.push(TokenTree::Subtree(Subtree { delimiter: Delimiter { - open: TokenId(2), - close: TokenId::DUMMY, + open: SpanData { + range: TextRange::at(TextSize::new(12), TextSize::of('{')), + anchor, + ctx: SyntaxContextId::ROOT, + }, + close: SpanData { + range: TextRange::at(TextSize::new(13), TextSize::of('}')), + anchor, + ctx: SyntaxContextId::ROOT, + }, kind: DelimiterKind::Brace, }, token_trees: vec![], @@ -162,20 +225,26 @@ mod tests { #[test] fn test_proc_macro_rpc_works() { let tt = fixture_token_tree(); + let mut span_data_table = Default::default(); let task = ExpandMacro { - macro_body: FlatTree::new(&tt, CURRENT_API_VERSION), + macro_body: FlatTree::new(&tt, CURRENT_API_VERSION, &mut span_data_table), macro_name: Default::default(), attributes: None, lib: std::env::current_dir().unwrap(), env: Default::default(), current_dir: Default::default(), + has_global_spans: ExpnGlobals { + serialize: true, + def_site: 0, + call_site: 0, + mixed_site: 0, + }, }; let json = serde_json::to_string(&task).unwrap(); // println!("{}", json); let back: ExpandMacro = serde_json::from_str(&json).unwrap(); - assert_eq!(tt, back.macro_body.to_subtree(CURRENT_API_VERSION)); + assert_eq!(tt, back.macro_body.to_subtree_resolved(CURRENT_API_VERSION, &span_data_table)); } } -*/ diff --git a/crates/proc-macro-api/src/msg/flat.rs b/crates/proc-macro-api/src/msg/flat.rs index f29aac5295682..baf8bbad4bf2b 100644 --- a/crates/proc-macro-api/src/msg/flat.rs +++ b/crates/proc-macro-api/src/msg/flat.rs @@ -37,40 +37,40 @@ use std::collections::{HashMap, VecDeque}; +use base_db::span::SpanData; +use indexmap::IndexSet; use serde::{Deserialize, Serialize}; -use text_size::TextRange; -use tt::{Span, SyntaxContext}; -use crate::msg::{ENCODE_CLOSE_SPAN_VERSION, VARIABLE_SIZED_SPANS}; +use crate::msg::ENCODE_CLOSE_SPAN_VERSION; -pub trait SerializableSpan: Span { - fn into_u32(self) -> [u32; L]; - fn from_u32(input: [u32; L]) -> Self; +pub type SpanDataIndexMap = IndexSet; + +#[derive(Clone, Copy, PartialEq, Eq, Hash)] +pub struct TokenId(pub u32); + +impl std::fmt::Debug for TokenId { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + self.0.fmt(f) + } +} + +impl TokenId { + pub const DEF_SITE: Self = TokenId(0); + pub const CALL_SITE: Self = TokenId(0); + pub const MIXED_SITE: Self = TokenId(0); } -// impl SerializableSpan<1> for tt::TokenId { -// fn into_u32(self) -> [u32; 1] { -// [self.0] -// } -// fn from_u32([input]: [u32; 1]) -> Self { -// tt::TokenId(input) -// } -// } - -impl SerializableSpan<3> for tt::SpanData -where - Anchor: From + Into, - Self: Span, - Ctx: SyntaxContext, -{ - 
fn into_u32(self) -> [u32; 3] { - [self.anchor.into(), self.range.start().into(), self.range.end().into()] + +impl tt::Span for TokenId { + const DUMMY: Self = TokenId(!0); + + type Anchor = (); + + fn anchor(self) -> Self::Anchor { + () } - fn from_u32([file_id, start, end]: [u32; 3]) -> Self { - tt::SpanData { - anchor: file_id.into(), - range: TextRange::new(start.into(), end.into()), - ctx: Ctx::DUMMY, - } + + fn mk(_: Self::Anchor, _: text_size::TextRange) -> Self { + Self::DUMMY } } @@ -82,82 +82,41 @@ pub struct FlatTree { ident: Vec, token_tree: Vec, text: Vec, - #[serde(skip_serializing_if = "SpanMap::do_serialize")] - #[serde(default)] - span_map: SpanMap, -} - -#[derive(Serialize, Deserialize, Debug)] -struct SpanMap { - #[serde(skip_serializing)] - serialize: bool, - span_size: u32, - spans: Vec, -} - -impl Default for SpanMap { - fn default() -> Self { - Self { serialize: false, span_size: 1, spans: Default::default() } - } -} - -impl SpanMap { - fn serialize_span>(&mut self, span: S) -> u32 { - let u32s = span.into_u32(); - if L == 1 { - u32s[0] - } else { - let offset = self.spans.len() as u32; - self.spans.extend(u32s); - offset - } - } - fn deserialize_span>(&self, offset: u32) -> S { - S::from_u32(if L == 1 { - [offset].as_ref().try_into().unwrap() - } else { - self.spans[offset as usize..][..L].try_into().unwrap() - }) - } } -impl SpanMap { - fn do_serialize(&self) -> bool { - self.serialize - } -} - -struct SubtreeRepr { - open: S, - close: S, +struct SubtreeRepr { + open: TokenId, + close: TokenId, kind: tt::DelimiterKind, tt: [u32; 2], } -struct LiteralRepr { - id: S, +struct LiteralRepr { + id: TokenId, text: u32, } -struct PunctRepr { - id: S, +struct PunctRepr { + id: TokenId, char: char, spacing: tt::Spacing, } -struct IdentRepr { - id: S, +struct IdentRepr { + id: TokenId, text: u32, } impl FlatTree { - pub fn new>( - subtree: &tt::Subtree, + pub fn new( + subtree: &tt::Subtree, version: u32, + span_data_table: &mut SpanDataIndexMap, ) -> FlatTree { let mut w = Writer { string_table: HashMap::new(), work: VecDeque::new(), + span_data_table, subtree: Vec::new(), literal: Vec::new(), @@ -167,78 +126,111 @@ impl FlatTree { text: Vec::new(), }; w.write(subtree); - assert!(L == 1 || version >= VARIABLE_SIZED_SPANS); - let mut span_map = SpanMap { - serialize: version >= VARIABLE_SIZED_SPANS && L != 1, - span_size: L as u32, - spans: Vec::new(), - }; - return FlatTree { + + FlatTree { subtree: if version >= ENCODE_CLOSE_SPAN_VERSION { - write_vec(&mut span_map, w.subtree, SubtreeRepr::write_with_close_span) + write_vec(w.subtree, SubtreeRepr::write_with_close_span) } else { - write_vec(&mut span_map, w.subtree, SubtreeRepr::write) + write_vec(w.subtree, SubtreeRepr::write) }, - literal: write_vec(&mut span_map, w.literal, LiteralRepr::write), - punct: write_vec(&mut span_map, w.punct, PunctRepr::write), - ident: write_vec(&mut span_map, w.ident, IdentRepr::write), + literal: write_vec(w.literal, LiteralRepr::write), + punct: write_vec(w.punct, PunctRepr::write), + ident: write_vec(w.ident, IdentRepr::write), token_tree: w.token_tree, text: w.text, - span_map, + } + } + + pub fn new_raw(subtree: &tt::Subtree, version: u32) -> FlatTree { + let mut w = Writer { + string_table: HashMap::new(), + work: VecDeque::new(), + span_data_table: &mut (), + + subtree: Vec::new(), + literal: Vec::new(), + punct: Vec::new(), + ident: Vec::new(), + token_tree: Vec::new(), + text: Vec::new(), }; + w.write(subtree); - fn write_vec [u32; N], const N: usize>( - map: &mut SpanMap, - xs: 
Vec, - f: F, - ) -> Vec { - xs.into_iter().flat_map(|it| f(it, map)).collect() + FlatTree { + subtree: if version >= ENCODE_CLOSE_SPAN_VERSION { + write_vec(w.subtree, SubtreeRepr::write_with_close_span) + } else { + write_vec(w.subtree, SubtreeRepr::write) + }, + literal: write_vec(w.literal, LiteralRepr::write), + punct: write_vec(w.punct, PunctRepr::write), + ident: write_vec(w.ident, IdentRepr::write), + token_tree: w.token_tree, + text: w.text, } } - pub fn to_subtree>( + pub fn to_subtree_resolved( self, version: u32, - ) -> tt::Subtree { - assert!((version >= VARIABLE_SIZED_SPANS || L == 1) && L as u32 == self.span_map.span_size); - return Reader { + span_data_table: &SpanDataIndexMap, + ) -> tt::Subtree { + Reader { subtree: if version >= ENCODE_CLOSE_SPAN_VERSION { - read_vec(&self.span_map, self.subtree, SubtreeRepr::read_with_close_span) + read_vec(self.subtree, SubtreeRepr::read_with_close_span) } else { - read_vec(&self.span_map, self.subtree, SubtreeRepr::read) + read_vec(self.subtree, SubtreeRepr::read) }, - literal: read_vec(&self.span_map, self.literal, LiteralRepr::read), - punct: read_vec(&self.span_map, self.punct, PunctRepr::read), - ident: read_vec(&self.span_map, self.ident, IdentRepr::read), + literal: read_vec(self.literal, LiteralRepr::read), + punct: read_vec(self.punct, PunctRepr::read), + ident: read_vec(self.ident, IdentRepr::read), token_tree: self.token_tree, text: self.text, + span_data_table, } - .read(); - - fn read_vec T, const N: usize>( - map: &SpanMap, - xs: Vec, - f: F, - ) -> Vec { - let mut chunks = xs.chunks_exact(N); - let res = chunks.by_ref().map(|chunk| f(chunk.try_into().unwrap(), map)).collect(); - assert!(chunks.remainder().is_empty()); - res + .read() + } + + pub fn to_subtree_unresolved(self, version: u32) -> tt::Subtree { + Reader { + subtree: if version >= ENCODE_CLOSE_SPAN_VERSION { + read_vec(self.subtree, SubtreeRepr::read_with_close_span) + } else { + read_vec(self.subtree, SubtreeRepr::read) + }, + literal: read_vec(self.literal, LiteralRepr::read), + punct: read_vec(self.punct, PunctRepr::read), + ident: read_vec(self.ident, IdentRepr::read), + token_tree: self.token_tree, + text: self.text, + span_data_table: &(), } + .read() } } -impl> SubtreeRepr { - fn write(self, map: &mut SpanMap) -> [u32; 4] { +fn read_vec T, const N: usize>(xs: Vec, f: F) -> Vec { + let mut chunks = xs.chunks_exact(N); + let res = chunks.by_ref().map(|chunk| f(chunk.try_into().unwrap())).collect(); + assert!(chunks.remainder().is_empty()); + res +} + +fn write_vec [u32; N], const N: usize>(xs: Vec, f: F) -> Vec { + xs.into_iter().flat_map(f).collect() +} + +impl SubtreeRepr { + fn write(self) -> [u32; 4] { let kind = match self.kind { tt::DelimiterKind::Invisible => 0, tt::DelimiterKind::Parenthesis => 1, tt::DelimiterKind::Brace => 2, tt::DelimiterKind::Bracket => 3, }; - [map.serialize_span(self.open), kind, self.tt[0], self.tt[1]] + [self.open.0, kind, self.tt[0], self.tt[1]] } - fn read([open, kind, lo, len]: [u32; 4], map: &SpanMap) -> Self { + fn read([open, kind, lo, len]: [u32; 4]) -> SubtreeRepr { let kind = match kind { 0 => tt::DelimiterKind::Invisible, 1 => tt::DelimiterKind::Parenthesis, @@ -246,24 +238,18 @@ impl> SubtreeRepr { 3 => tt::DelimiterKind::Bracket, other => panic!("bad kind {other}"), }; - SubtreeRepr { open: map.deserialize_span(open), close: S::DUMMY, kind, tt: [lo, len] } + SubtreeRepr { open: TokenId(open), close: TokenId(!0), kind, tt: [lo, len] } } - fn write_with_close_span(self, map: &mut SpanMap) -> [u32; 5] { + fn 
write_with_close_span(self) -> [u32; 5] { let kind = match self.kind { tt::DelimiterKind::Invisible => 0, tt::DelimiterKind::Parenthesis => 1, tt::DelimiterKind::Brace => 2, tt::DelimiterKind::Bracket => 3, }; - [ - map.serialize_span(self.open), - map.serialize_span(self.close), - kind, - self.tt[0], - self.tt[1], - ] + [self.open.0, self.close.0, kind, self.tt[0], self.tt[1]] } - fn read_with_close_span([open, close, kind, lo, len]: [u32; 5], map: &SpanMap) -> Self { + fn read_with_close_span([open, close, kind, lo, len]: [u32; 5]) -> SubtreeRepr { let kind = match kind { 0 => tt::DelimiterKind::Invisible, 1 => tt::DelimiterKind::Parenthesis, @@ -271,64 +257,86 @@ impl> SubtreeRepr { 3 => tt::DelimiterKind::Bracket, other => panic!("bad kind {other}"), }; - SubtreeRepr { - open: map.deserialize_span(open), - close: map.deserialize_span(close), - kind, - tt: [lo, len], - } + SubtreeRepr { open: TokenId(open), close: TokenId(close), kind, tt: [lo, len] } } } -impl> LiteralRepr { - fn write(self, map: &mut SpanMap) -> [u32; 2] { - [map.serialize_span(self.id), self.text] +impl LiteralRepr { + fn write(self) -> [u32; 2] { + [self.id.0, self.text] } - fn read([id, text]: [u32; 2], map: &SpanMap) -> Self { - LiteralRepr { id: map.deserialize_span(id), text } + fn read([id, text]: [u32; 2]) -> LiteralRepr { + LiteralRepr { id: TokenId(id), text } } } -impl> PunctRepr { - fn write(self, map: &mut SpanMap) -> [u32; 3] { +impl PunctRepr { + fn write(self) -> [u32; 3] { let spacing = match self.spacing { tt::Spacing::Alone => 0, tt::Spacing::Joint => 1, }; - [map.serialize_span(self.id), self.char as u32, spacing] + [self.id.0, self.char as u32, spacing] } - fn read([id, char, spacing]: [u32; 3], map: &SpanMap) -> Self { + fn read([id, char, spacing]: [u32; 3]) -> PunctRepr { let spacing = match spacing { 0 => tt::Spacing::Alone, 1 => tt::Spacing::Joint, other => panic!("bad spacing {other}"), }; - PunctRepr { id: map.deserialize_span(id), char: char.try_into().unwrap(), spacing } + PunctRepr { id: TokenId(id), char: char.try_into().unwrap(), spacing } } } -impl> IdentRepr { - fn write(self, map: &mut SpanMap) -> [u32; 2] { - [map.serialize_span(self.id), self.text] +impl IdentRepr { + fn write(self) -> [u32; 2] { + [self.id.0, self.text] } - fn read(data: [u32; 2], map: &SpanMap) -> Self { - IdentRepr { id: map.deserialize_span(data[0]), text: data[1] } + fn read(data: [u32; 2]) -> IdentRepr { + IdentRepr { id: TokenId(data[0]), text: data[1] } } } -struct Writer<'a, const L: usize, S> { +trait Span: Copy { + type Table; + fn token_id_of(table: &mut Self::Table, s: Self) -> TokenId; + fn span_for_token_id(table: &Self::Table, id: TokenId) -> Self; +} + +impl Span for TokenId { + type Table = (); + fn token_id_of((): &mut Self::Table, token_id: Self) -> TokenId { + token_id + } + + fn span_for_token_id((): &Self::Table, id: TokenId) -> Self { + id + } +} +impl Span for SpanData { + type Table = IndexSet; + fn token_id_of(table: &mut Self::Table, span: Self) -> TokenId { + TokenId(table.insert_full(span).0 as u32) + } + fn span_for_token_id(table: &Self::Table, id: TokenId) -> Self { + *table.get_index(id.0 as usize).unwrap_or_else(|| &table[0]) + } +} + +struct Writer<'a, 'span, S: Span> { work: VecDeque<(usize, &'a tt::Subtree)>, string_table: HashMap<&'a str, u32>, + span_data_table: &'span mut S::Table, - subtree: Vec>, - literal: Vec>, - punct: Vec>, - ident: Vec>, + subtree: Vec, + literal: Vec, + punct: Vec, + ident: Vec, token_tree: Vec, text: Vec, } -impl<'a, const L: usize, S: Copy> 
Writer<'a, L, S> { +impl<'a, 'span, S: Span> Writer<'a, 'span, S> { fn write(&mut self, root: &'a tt::Subtree) { self.enqueue(root); while let Some((idx, subtree)) = self.work.pop_front() { @@ -336,6 +344,10 @@ impl<'a, const L: usize, S: Copy> Writer<'a, L, S> { } } + fn token_id_of(&mut self, span: S) -> TokenId { + S::token_id_of(self.span_data_table, span) + } + fn subtree(&mut self, idx: usize, subtree: &'a tt::Subtree) { let mut first_tt = self.token_tree.len(); let n_tt = subtree.token_trees.len(); @@ -353,22 +365,21 @@ impl<'a, const L: usize, S: Copy> Writer<'a, L, S> { tt::Leaf::Literal(lit) => { let idx = self.literal.len() as u32; let text = self.intern(&lit.text); - self.literal.push(LiteralRepr { id: lit.span, text }); + let id = self.token_id_of(lit.span); + self.literal.push(LiteralRepr { id, text }); idx << 2 | 0b01 } tt::Leaf::Punct(punct) => { let idx = self.punct.len() as u32; - self.punct.push(PunctRepr { - char: punct.char, - spacing: punct.spacing, - id: punct.span, - }); + let id = self.token_id_of(punct.span); + self.punct.push(PunctRepr { char: punct.char, spacing: punct.spacing, id }); idx << 2 | 0b10 } tt::Leaf::Ident(ident) => { let idx = self.ident.len() as u32; let text = self.intern(&ident.text); - self.ident.push(IdentRepr { id: ident.span, text }); + let id = self.token_id_of(ident.span); + self.ident.push(IdentRepr { id, text }); idx << 2 | 0b11 } }, @@ -380,8 +391,8 @@ impl<'a, const L: usize, S: Copy> Writer<'a, L, S> { fn enqueue(&mut self, subtree: &'a tt::Subtree) -> u32 { let idx = self.subtree.len(); - let open = subtree.delimiter.open; - let close = subtree.delimiter.close; + let open = self.token_id_of(subtree.delimiter.open); + let close = self.token_id_of(subtree.delimiter.close); let delimiter_kind = subtree.delimiter.kind; self.subtree.push(SubtreeRepr { open, close, kind: delimiter_kind, tt: [!0, !0] }); self.work.push_back((idx, subtree)); @@ -398,23 +409,29 @@ impl<'a, const L: usize, S: Copy> Writer<'a, L, S> { } } -struct Reader { - subtree: Vec>, - literal: Vec>, - punct: Vec>, - ident: Vec>, +struct Reader<'span, S: Span> { + subtree: Vec, + literal: Vec, + punct: Vec, + ident: Vec, token_tree: Vec, text: Vec, + span_data_table: &'span S::Table, } -impl> Reader { +impl<'span, S: Span> Reader<'span, S> { pub(crate) fn read(self) -> tt::Subtree { let mut res: Vec>> = vec![None; self.subtree.len()]; + let read_span = |id| S::span_for_token_id(self.span_data_table, id); for i in (0..self.subtree.len()).rev() { let repr = &self.subtree[i]; let token_trees = &self.token_tree[repr.tt[0] as usize..repr.tt[1] as usize]; let s = tt::Subtree { - delimiter: tt::Delimiter { open: repr.open, close: repr.close, kind: repr.kind }, + delimiter: tt::Delimiter { + open: read_span(repr.open), + close: read_span(repr.close), + kind: repr.kind, + }, token_trees: token_trees .iter() .copied() @@ -429,7 +446,7 @@ impl> Reader { let repr = &self.literal[idx]; tt::Leaf::Literal(tt::Literal { text: self.text[repr.text as usize].as_str().into(), - span: repr.id, + span: read_span(repr.id), }) .into() } @@ -438,7 +455,7 @@ impl> Reader { tt::Leaf::Punct(tt::Punct { char: repr.char, spacing: repr.spacing, - span: repr.id, + span: read_span(repr.id), }) .into() } @@ -446,7 +463,7 @@ impl> Reader { let repr = &self.ident[idx]; tt::Leaf::Ident(tt::Ident { text: self.text[repr.text as usize].as_str().into(), - span: repr.id, + span: read_span(repr.id), }) .into() } diff --git a/crates/proc-macro-srv-cli/src/main.rs b/crates/proc-macro-srv-cli/src/main.rs index 
ea65c33604f8e..50ce586fc429b 100644 --- a/crates/proc-macro-srv-cli/src/main.rs +++ b/crates/proc-macro-srv-cli/src/main.rs @@ -18,14 +18,12 @@ fn main() -> std::io::Result<()> { run() } -#[cfg(not(FALSE))] -#[cfg(not(feature = "sysroot-abi"))] +#[cfg(not(any(feature = "sysroot-abi", rust_analyzer)))] fn run() -> io::Result<()> { panic!("proc-macro-srv-cli requires the `sysroot-abi` feature to be enabled"); } -#[cfg(FALSE)] -#[cfg(feature = "sysroot-abi")] +#[cfg(any(feature = "sysroot-abi", rust_analyzer))] fn run() -> io::Result<()> { use proc_macro_api::msg::{self, Message}; diff --git a/crates/proc-macro-srv/src/dylib.rs b/crates/proc-macro-srv/src/dylib.rs index dd05e250c2def..80bce3af1a006 100644 --- a/crates/proc-macro-srv/src/dylib.rs +++ b/crates/proc-macro-srv/src/dylib.rs @@ -11,7 +11,7 @@ use libloading::Library; use memmap2::Mmap; use object::Object; use paths::AbsPath; -use proc_macro_api::{read_dylib_info, ProcMacroKind}; +use proc_macro_api::{msg::TokenId, read_dylib_info, ProcMacroKind}; const NEW_REGISTRAR_SYMBOL: &str = "_rustc_proc_macro_decls_"; @@ -152,8 +152,14 @@ impl Expander { macro_name: &str, macro_body: &crate::tt::Subtree, attributes: Option<&crate::tt::Subtree>, + def_site: TokenId, + call_site: TokenId, + mixed_site: TokenId, ) -> Result { - let result = self.inner.proc_macros.expand(macro_name, macro_body, attributes); + let result = self + .inner + .proc_macros + .expand(macro_name, macro_body, attributes, def_site, call_site, mixed_site); result.map_err(|e| e.as_str().unwrap_or_else(|| "".to_string())) } diff --git a/crates/proc-macro-srv/src/lib.rs b/crates/proc-macro-srv/src/lib.rs index bd0d1b79fa1fc..32a07a84777bf 100644 --- a/crates/proc-macro-srv/src/lib.rs +++ b/crates/proc-macro-srv/src/lib.rs @@ -10,7 +10,6 @@ //! * By **copying** the whole rustc `lib_proc_macro` code, we are able to build this with `stable` //! rustc rather than `unstable`. (Although in general ABI compatibility is still an issue)… -#![cfg(FALSE)] // TODO #![cfg(any(feature = "sysroot-abi", rust_analyzer))] #![feature(proc_macro_internals, proc_macro_diagnostic, proc_macro_span)] #![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)] @@ -32,12 +31,25 @@ use std::{ time::SystemTime, }; +use ::tt::Span; use proc_macro_api::{ - msg::{self, CURRENT_API_VERSION}, + msg::{self, ExpnGlobals, TokenId, CURRENT_API_VERSION, HAS_GLOBAL_SPANS}, ProcMacroKind, }; -use ::tt::token_id as tt; +mod tt { + pub use proc_macro_api::msg::TokenId; + + pub use ::tt::*; + + pub type Subtree = ::tt::Subtree; + pub type TokenTree = ::tt::TokenTree; + pub type Delimiter = ::tt::Delimiter; + pub type Leaf = ::tt::Leaf; + pub type Literal = ::tt::Literal; + pub type Punct = ::tt::Punct; + pub type Ident = ::tt::Ident; +} // see `build.rs` include!(concat!(env!("OUT_DIR"), "/rustc_version.rs")); @@ -71,16 +83,28 @@ impl ProcMacroSrv { None => None, }; - let macro_body = task.macro_body.to_subtree(CURRENT_API_VERSION); - let attributes = task.attributes.map(|it| it.to_subtree(CURRENT_API_VERSION)); + let ExpnGlobals { def_site, call_site, mixed_site, .. 
} = task.has_global_spans; + let def_site = TokenId(def_site as u32); + let call_site = TokenId(call_site as u32); + let mixed_site = TokenId(mixed_site as u32); + + let macro_body = task.macro_body.to_subtree_unresolved(CURRENT_API_VERSION); + let attributes = task.attributes.map(|it| it.to_subtree_unresolved(CURRENT_API_VERSION)); let result = thread::scope(|s| { let thread = thread::Builder::new() .stack_size(EXPANDER_STACK_SIZE) .name(task.macro_name.clone()) .spawn_scoped(s, || { expander - .expand(&task.macro_name, ¯o_body, attributes.as_ref()) - .map(|it| msg::FlatTree::new(&it, CURRENT_API_VERSION)) + .expand( + &task.macro_name, + ¯o_body, + attributes.as_ref(), + def_site, + call_site, + mixed_site, + ) + .map(|it| msg::FlatTree::new_raw(&it, CURRENT_API_VERSION)) }); let res = match thread { Ok(handle) => handle.join(), diff --git a/crates/proc-macro-srv/src/proc_macros.rs b/crates/proc-macro-srv/src/proc_macros.rs index 3c6f320331928..4f87fa281b77e 100644 --- a/crates/proc-macro-srv/src/proc_macros.rs +++ b/crates/proc-macro-srv/src/proc_macros.rs @@ -1,7 +1,7 @@ //! Proc macro ABI use libloading::Library; -use proc_macro_api::{ProcMacroKind, RustCInfo}; +use proc_macro_api::{msg::TokenId, ProcMacroKind, RustCInfo}; use crate::{dylib::LoadProcMacroDylibError, server::SYMBOL_INTERNER, tt}; @@ -45,6 +45,9 @@ impl ProcMacros { macro_name: &str, macro_body: &tt::Subtree, attributes: Option<&tt::Subtree>, + def_site: TokenId, + call_site: TokenId, + mixed_site: TokenId, ) -> Result { let parsed_body = crate::server::TokenStream::with_subtree(macro_body.clone()); @@ -59,34 +62,56 @@ impl ProcMacros { } if *trait_name == macro_name => { let res = client.run( &proc_macro::bridge::server::SameThread, - crate::server::RustAnalyzer { interner: &SYMBOL_INTERNER }, + crate::server::RustAnalyzer { + interner: &SYMBOL_INTERNER, + call_site, + def_site, + mixed_site, + }, parsed_body, true, ); - return res.map(|it| it.into_subtree()).map_err(crate::PanicMessage::from); + return res + .map(|it| it.into_subtree(call_site)) + .map_err(crate::PanicMessage::from); } proc_macro::bridge::client::ProcMacro::Bang { name, client } if *name == macro_name => { let res = client.run( &proc_macro::bridge::server::SameThread, - crate::server::RustAnalyzer { interner: &SYMBOL_INTERNER }, + crate::server::RustAnalyzer { + interner: &SYMBOL_INTERNER, + call_site, + def_site, + mixed_site, + }, parsed_body, true, ); - return res.map(|it| it.into_subtree()).map_err(crate::PanicMessage::from); + return res + .map(|it| it.into_subtree(call_site)) + .map_err(crate::PanicMessage::from); } proc_macro::bridge::client::ProcMacro::Attr { name, client } if *name == macro_name => { let res = client.run( &proc_macro::bridge::server::SameThread, - crate::server::RustAnalyzer { interner: &SYMBOL_INTERNER }, + crate::server::RustAnalyzer { + interner: &SYMBOL_INTERNER, + + call_site, + def_site, + mixed_site, + }, parsed_attributes, parsed_body, true, ); - return res.map(|it| it.into_subtree()).map_err(crate::PanicMessage::from); + return res + .map(|it| it.into_subtree(call_site)) + .map_err(crate::PanicMessage::from); } _ => continue, } diff --git a/crates/proc-macro-srv/src/server.rs b/crates/proc-macro-srv/src/server.rs index fe18451d38482..fc080eccc0f4a 100644 --- a/crates/proc-macro-srv/src/server.rs +++ b/crates/proc-macro-srv/src/server.rs @@ -11,6 +11,7 @@ use proc_macro::bridge::{self, server}; mod token_stream; +use proc_macro_api::msg::TokenId; pub use token_stream::TokenStream; use token_stream::TokenStreamBuilder; 
@@ -43,6 +44,9 @@ pub struct FreeFunctions; pub struct RustAnalyzer { // FIXME: store span information here. pub(crate) interner: SymbolInternerRef, + pub call_site: TokenId, + pub def_site: TokenId, + pub mixed_site: TokenId, } impl server::Types for RustAnalyzer { @@ -69,7 +73,7 @@ impl server::FreeFunctions for RustAnalyzer { kind: bridge::LitKind::Err, symbol: Symbol::intern(self.interner, s), suffix: None, - span: tt::TokenId::unspecified(), + span: self.call_site, }) } @@ -83,7 +87,7 @@ impl server::TokenStream for RustAnalyzer { stream.is_empty() } fn from_str(&mut self, src: &str) -> Self::TokenStream { - src.parse().expect("cannot parse string") + Self::TokenStream::from_str(src, self.call_site).expect("cannot parse string") } fn to_string(&mut self, stream: &Self::TokenStream) -> String { stream.to_string() @@ -280,7 +284,7 @@ impl server::Span for RustAnalyzer { } fn recover_proc_macro_span(&mut self, _id: usize) -> Self::Span { // FIXME stub - tt::TokenId::unspecified() + self.call_site } /// Recent feature, not yet in the proc_macro /// @@ -317,15 +321,15 @@ impl server::Span for RustAnalyzer { } fn resolved_at(&mut self, _span: Self::Span, _at: Self::Span) -> Self::Span { // FIXME handle span - tt::TokenId::unspecified() + self.call_site } fn end(&mut self, _self_: Self::Span) -> Self::Span { - tt::TokenId::unspecified() + self.call_site } fn start(&mut self, _self_: Self::Span) -> Self::Span { - tt::TokenId::unspecified() + self.call_site } fn line(&mut self, _span: Self::Span) -> usize { @@ -349,9 +353,9 @@ impl server::Symbol for RustAnalyzer { impl server::Server for RustAnalyzer { fn globals(&mut self) -> bridge::ExpnGlobals { bridge::ExpnGlobals { - def_site: Span::unspecified(), - call_site: Span::unspecified(), - mixed_site: Span::unspecified(), + def_site: self.def_site, + call_site: self.call_site, + mixed_site: self.mixed_site, } } @@ -422,6 +426,8 @@ impl LiteralFormatter { #[cfg(test)] mod tests { + use ::tt::Span; + use super::*; #[test] @@ -430,16 +436,16 @@ mod tests { token_trees: vec![ tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident { text: "struct".into(), - span: tt::TokenId::unspecified(), + span: tt::TokenId::DUMMY, })), tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident { text: "T".into(), - span: tt::TokenId::unspecified(), + span: tt::TokenId::DUMMY, })), tt::TokenTree::Subtree(tt::Subtree { delimiter: tt::Delimiter { - open: tt::TokenId::unspecified(), - close: tt::TokenId::unspecified(), + open: tt::TokenId::DUMMY, + close: tt::TokenId::DUMMY, kind: tt::DelimiterKind::Brace, }, token_trees: vec![], @@ -452,33 +458,32 @@ mod tests { #[test] fn test_ra_server_from_str() { - use std::str::FromStr; let subtree_paren_a = tt::TokenTree::Subtree(tt::Subtree { delimiter: tt::Delimiter { - open: tt::TokenId::unspecified(), - close: tt::TokenId::unspecified(), + open: tt::TokenId::DUMMY, + close: tt::TokenId::DUMMY, kind: tt::DelimiterKind::Parenthesis, }, token_trees: vec![tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident { text: "a".into(), - span: tt::TokenId::unspecified(), + span: tt::TokenId::DUMMY, }))], }); - let t1 = TokenStream::from_str("(a)").unwrap(); + let t1 = TokenStream::from_str("(a)", tt::TokenId::DUMMY).unwrap(); assert_eq!(t1.token_trees.len(), 1); assert_eq!(t1.token_trees[0], subtree_paren_a); - let t2 = TokenStream::from_str("(a);").unwrap(); + let t2 = TokenStream::from_str("(a);", tt::TokenId::DUMMY).unwrap(); assert_eq!(t2.token_trees.len(), 2); assert_eq!(t2.token_trees[0], subtree_paren_a); - let underscore = 
TokenStream::from_str("_").unwrap(); + let underscore = TokenStream::from_str("_", tt::TokenId::DUMMY).unwrap(); assert_eq!( underscore.token_trees[0], tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident { text: "_".into(), - span: tt::TokenId::unspecified(), + span: tt::TokenId::DUMMY, })) ); } diff --git a/crates/proc-macro-srv/src/server/token_stream.rs b/crates/proc-macro-srv/src/server/token_stream.rs index 2589d8b64d489..36be88250388d 100644 --- a/crates/proc-macro-srv/src/server/token_stream.rs +++ b/crates/proc-macro-srv/src/server/token_stream.rs @@ -1,5 +1,7 @@ //! TokenStream implementation used by sysroot ABI +use proc_macro_api::msg::TokenId; + use crate::tt::{self, TokenTree}; #[derive(Debug, Default, Clone)] @@ -20,8 +22,15 @@ impl TokenStream { } } - pub(crate) fn into_subtree(self) -> tt::Subtree { - tt::Subtree { delimiter: tt::Delimiter::UNSPECIFIED, token_trees: self.token_trees } + pub(crate) fn into_subtree(self, call_site: TokenId) -> tt::Subtree { + tt::Subtree { + delimiter: tt::Delimiter { + open: call_site, + close: call_site, + kind: tt::DelimiterKind::Invisible, + }, + token_trees: self.token_trees, + } } pub(super) fn is_empty(&self) -> bool { @@ -84,7 +93,7 @@ pub(super) struct TokenStreamBuilder { /// pub(super)lic implementation details for the `TokenStream` type, such as iterators. pub(super) mod token_stream { - use std::str::FromStr; + use proc_macro_api::msg::TokenId; use super::{tt, TokenStream, TokenTree}; @@ -109,14 +118,15 @@ pub(super) mod token_stream { /// /// NOTE: some errors may cause panics instead of returning `LexError`. We reserve the right to /// change these errors into `LexError`s later. - impl FromStr for TokenStream { - type Err = LexError; + #[rustfmt::skip] + impl /*FromStr for*/ TokenStream { + // type Err = LexError; - fn from_str(src: &str) -> Result { - let (subtree, _token_map) = - mbe::parse_to_token_tree(src).ok_or("Failed to parse from mbe")?; + pub(crate) fn from_str(src: &str, call_site: TokenId) -> Result { + let subtree = + mbe::parse_to_token_tree_static_span(call_site, src).ok_or("Failed to parse from mbe")?; - let subtree = subtree_replace_token_ids_with_unspecified(subtree); + let subtree = subtree_replace_token_ids_with_call_site(subtree,call_site); Ok(TokenStream::with_subtree(subtree)) } } @@ -127,43 +137,39 @@ pub(super) mod token_stream { } } - fn subtree_replace_token_ids_with_unspecified(subtree: tt::Subtree) -> tt::Subtree { + fn subtree_replace_token_ids_with_call_site( + subtree: tt::Subtree, + call_site: TokenId, + ) -> tt::Subtree { tt::Subtree { - delimiter: tt::Delimiter { - open: tt::TokenId::UNSPECIFIED, - close: tt::TokenId::UNSPECIFIED, - ..subtree.delimiter - }, + delimiter: tt::Delimiter { open: call_site, close: call_site, ..subtree.delimiter }, token_trees: subtree .token_trees .into_iter() - .map(token_tree_replace_token_ids_with_unspecified) + .map(|it| token_tree_replace_token_ids_with_call_site(it, call_site)) .collect(), } } - fn token_tree_replace_token_ids_with_unspecified(tt: tt::TokenTree) -> tt::TokenTree { + fn token_tree_replace_token_ids_with_call_site( + tt: tt::TokenTree, + call_site: TokenId, + ) -> tt::TokenTree { match tt { tt::TokenTree::Leaf(leaf) => { - tt::TokenTree::Leaf(leaf_replace_token_ids_with_unspecified(leaf)) + tt::TokenTree::Leaf(leaf_replace_token_ids_with_call_site(leaf, call_site)) } tt::TokenTree::Subtree(subtree) => { - tt::TokenTree::Subtree(subtree_replace_token_ids_with_unspecified(subtree)) + 
tt::TokenTree::Subtree(subtree_replace_token_ids_with_call_site(subtree, call_site))
             }
         }
     }

-    fn leaf_replace_token_ids_with_unspecified(leaf: tt::Leaf) -> tt::Leaf {
+    fn leaf_replace_token_ids_with_call_site(leaf: tt::Leaf, call_site: TokenId) -> tt::Leaf {
         match leaf {
-            tt::Leaf::Literal(lit) => {
-                tt::Leaf::Literal(tt::Literal { span: tt::TokenId::unspecified(), ..lit })
-            }
-            tt::Leaf::Punct(punct) => {
-                tt::Leaf::Punct(tt::Punct { span: tt::TokenId::unspecified(), ..punct })
-            }
-            tt::Leaf::Ident(ident) => {
-                tt::Leaf::Ident(tt::Ident { span: tt::TokenId::unspecified(), ..ident })
-            }
+            tt::Leaf::Literal(lit) => tt::Leaf::Literal(tt::Literal { span: call_site, ..lit }),
+            tt::Leaf::Punct(punct) => tt::Leaf::Punct(tt::Punct { span: call_site, ..punct }),
+            tt::Leaf::Ident(ident) => tt::Leaf::Ident(tt::Ident { span: call_site, ..ident }),
         }
     }
 }
diff --git a/crates/proc-macro-srv/src/tests/utils.rs b/crates/proc-macro-srv/src/tests/utils.rs
index 49b4d973b63bb..ccfefafb2cfc8 100644
--- a/crates/proc-macro-srv/src/tests/utils.rs
+++ b/crates/proc-macro-srv/src/tests/utils.rs
@@ -1,18 +1,19 @@
 //! utils used in proc-macro tests

 use expect_test::Expect;
-use std::str::FromStr;
+use proc_macro_api::msg::TokenId;
+use tt::Span;

 use crate::{dylib, proc_macro_test_dylib_path, ProcMacroSrv};

-fn parse_string(code: &str) -> Option<crate::server::TokenStream> {
+fn parse_string(code: &str, call_site: TokenId) -> Option<crate::server::TokenStream> {
     // This is a bit strange. We need to parse a string into a token stream in
     // order to create a tt::Subtree from it in fixtures. `into_subtree` is
     // implemented by all the ABIs we have so we arbitrarily choose one ABI to
     // write a `parse_string` function for and use that. The tests don't really
     // care which ABI we're using as the `into_subtree` function isn't part of
     // the ABI and shouldn't change between ABI versions.
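     // A purely illustrative use (not part of this patch): with the new
     // signature, `parse_string("(a)", TokenId::DUMMY)` would yield a
     // `TokenStream` whose tokens all carry `TokenId::DUMMY` as their span.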
- crate::server::TokenStream::from_str(code).ok() + crate::server::TokenStream::from_str(code, call_site).ok() } pub fn assert_expand(macro_name: &str, ra_fixture: &str, expect: Expect) { @@ -24,12 +25,22 @@ pub fn assert_expand_attr(macro_name: &str, ra_fixture: &str, attr_args: &str, e } fn assert_expand_impl(macro_name: &str, input: &str, attr: Option<&str>, expect: Expect) { + let call_site = TokenId::DUMMY; let path = proc_macro_test_dylib_path(); let expander = dylib::Expander::new(&path).unwrap(); - let fixture = parse_string(input).unwrap(); - let attr = attr.map(|attr| parse_string(attr).unwrap().into_subtree()); - - let res = expander.expand(macro_name, &fixture.into_subtree(), attr.as_ref()).unwrap(); + let fixture = parse_string(input, call_site).unwrap(); + let attr = attr.map(|attr| parse_string(attr, call_site).unwrap().into_subtree(call_site)); + + let res = expander + .expand( + macro_name, + &fixture.into_subtree(call_site), + attr.as_ref(), + TokenId::DUMMY, + TokenId::DUMMY, + TokenId::DUMMY, + ) + .unwrap(); expect.assert_eq(&format!("{res:?}")); } diff --git a/crates/rust-analyzer/src/integrated_benchmarks.rs b/crates/rust-analyzer/src/integrated_benchmarks.rs index a29e18811d881..a865d9e4ab821 100644 --- a/crates/rust-analyzer/src/integrated_benchmarks.rs +++ b/crates/rust-analyzer/src/integrated_benchmarks.rs @@ -56,7 +56,7 @@ fn integrated_highlighting_benchmark() { analysis.highlight_as_html(file_id, false).unwrap(); } - profile::init_from("*>1"); + profile::init_from("*>100"); { let _it = stdx::timeit("change"); From 7a8c4c001ba82e360d77d17a5815f2c6fd50e3e5 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Tue, 28 Nov 2023 16:50:05 +0100 Subject: [PATCH 16/80] Turn macro_expand from query to normal function --- crates/base-db/src/span.rs | 12 ++++-------- .../hir-def/src/macro_expansion_tests/proc_macros.rs | 1 - crates/hir-expand/src/db.rs | 5 +---- crates/hir-expand/src/files.rs | 1 + crates/hir-expand/src/fixup.rs | 4 ++-- crates/hir-expand/src/span.rs | 2 ++ crates/hir/src/db.rs | 4 ++-- crates/ide-completion/src/tests/proc_macros.rs | 2 -- crates/ide-db/src/apply_change.rs | 2 +- crates/ide-db/src/lib.rs | 7 ------- crates/proc-macro-api/src/msg/flat.rs | 8 +------- crates/proc-macro-srv/src/lib.rs | 3 +-- 12 files changed, 15 insertions(+), 36 deletions(-) diff --git a/crates/base-db/src/span.rs b/crates/base-db/src/span.rs index d2b40ecdd2143..420c669641cc4 100644 --- a/crates/base-db/src/span.rs +++ b/crates/base-db/src/span.rs @@ -1,4 +1,4 @@ -/// File and span related types. +//! File and span related types. // FIXME: This should probably be moved into its own crate. use std::fmt; @@ -26,19 +26,15 @@ impl fmt::Display for SyntaxContextId { impl SyntaxContext for SyntaxContextId { const DUMMY: Self = Self::ROOT; - // veykril(HACK): salsa doesn't allow us fetching the id of the current input to be allocated so - // we need a special value that behaves as the current context. } // inherent trait impls please tyvm impl SyntaxContextId { - // TODO: This is very much UB, salsa exposes no way to create an InternId in a const context - // currently (which kind of makes sense but we need it here!) pub const ROOT: Self = SyntaxContextId(unsafe { InternId::new_unchecked(0) }); - // TODO: This is very much UB, salsa exposes no way to create an InternId in a const context - // currently (which kind of makes sense but we need it here!) 
+ // veykril(HACK): salsa doesn't allow us fetching the id of the current input to be allocated so + // we need a special value that behaves as the current context. pub const SELF_REF: Self = SyntaxContextId(unsafe { InternId::new_unchecked(InternId::MAX - 1) }); - /// Used syntax fixups + // Used for syntax fixups pub const FAKE: Self = SyntaxContextId(unsafe { InternId::new_unchecked(InternId::MAX - 2) }); pub fn is_root(self) -> bool { diff --git a/crates/hir-def/src/macro_expansion_tests/proc_macros.rs b/crates/hir-def/src/macro_expansion_tests/proc_macros.rs index 548f22499b315..060b8aa8c1931 100644 --- a/crates/hir-def/src/macro_expansion_tests/proc_macros.rs +++ b/crates/hir-def/src/macro_expansion_tests/proc_macros.rs @@ -74,7 +74,6 @@ fn foo() { } #[test] -#[ignore] // TODO fn attribute_macro_syntax_completion_2() { // common case of dot completion while typing check( diff --git a/crates/hir-expand/src/db.rs b/crates/hir-expand/src/db.rs index 1d55aaf1703f8..00a7b0cb3fef4 100644 --- a/crates/hir-expand/src/db.rs +++ b/crates/hir-expand/src/db.rs @@ -146,9 +146,6 @@ pub trait ExpandDatabase: SourceDatabase { id: AstId, ) -> Arc; - /// Expand macro call to a token tree. - // This query is LRU cached - fn macro_expand(&self, macro_call: MacroCallId) -> ExpandResult>; #[salsa::invoke(crate::builtin_fn_macro::include_arg_to_tt)] fn include_expand( &self, @@ -315,7 +312,7 @@ fn parse_macro_expansion( macro_file: MacroFileId, ) -> ExpandResult<(Parse, Arc)> { let _p = profile::span("parse_macro_expansion"); - let mbe::ValueResult { value: tt, err } = db.macro_expand(macro_file.macro_call_id); + let mbe::ValueResult { value: tt, err } = macro_expand(db, macro_file.macro_call_id); let expand_to = macro_expand_to(db, macro_file.macro_call_id); diff --git a/crates/hir-expand/src/files.rs b/crates/hir-expand/src/files.rs index 57a7fa5ec398f..36eb2dbb27c4f 100644 --- a/crates/hir-expand/src/files.rs +++ b/crates/hir-expand/src/files.rs @@ -1,3 +1,4 @@ +//! Things to wrap other things in file ids. use std::iter; use base_db::{ diff --git a/crates/hir-expand/src/fixup.rs b/crates/hir-expand/src/fixup.rs index 326ff1dec484d..8fa7d5762958e 100644 --- a/crates/hir-expand/src/fixup.rs +++ b/crates/hir-expand/src/fixup.rs @@ -52,7 +52,7 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups { while let Some(event) = preorder.next() { let syntax::WalkEvent::Enter(node) = event else { continue }; - /* TODO + /* if can_handle_error(&node) && has_error_to_handle(&node) { // the node contains an error node, we have to completely replace it by something valid let (original_tree, new_tmap, new_next_id) = @@ -295,7 +295,7 @@ pub(crate) fn reverse_fixups(tt: &mut Subtree, undo_info: &SyntaxFixupUndoInfo) tt::TokenTree::Leaf(leaf) => leaf.span().ctx != SyntaxContextId::FAKE, tt::TokenTree::Subtree(st) => st.delimiter.open.ctx != SyntaxContextId::FAKE, }) - // .flat_map(|tt| match tt { TODO + // .flat_map(|tt| match tt { // tt::TokenTree::Subtree(mut tt) => { // reverse_fixups(&mut tt, undo_info); // SmallVec::from_const([tt.into()]) diff --git a/crates/hir-expand/src/span.rs b/crates/hir-expand/src/span.rs index c2399259facb5..1703923d11a1c 100644 --- a/crates/hir-expand/src/span.rs +++ b/crates/hir-expand/src/span.rs @@ -1,3 +1,5 @@ +//! Spanmaps allow turning absolute ranges into relative ranges for incrementality purposes as well +//! as associating spans with text ranges in a particular file. 
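+//!
+//! Rough illustration (schematic, not the exact API): a token at absolute
+//! range `120..125` inside an item anchored at offset `100` is stored as the
+//! anchor-relative range `20..25`, so an edit above the anchor moves only the
+//! anchor and keeps the stored span valid for incremental reuse.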
use base_db::{ span::{ErasedFileAstId, SpanAnchor, SpanData, SyntaxContextId, ROOT_ERASED_FILE_AST_ID}, FileId, diff --git a/crates/hir/src/db.rs b/crates/hir/src/db.rs index b9112dee31661..ff6f987aa1286 100644 --- a/crates/hir/src/db.rs +++ b/crates/hir/src/db.rs @@ -23,7 +23,7 @@ pub use hir_def::db::{ }; pub use hir_expand::db::{ AstIdMapQuery, DeclMacroExpanderQuery, ExpandDatabase, ExpandDatabaseStorage, - ExpandProcMacroQuery, IncludeExpandQuery, InternMacroCallQuery, MacroArgQuery, - MacroExpandQuery, ParseMacroExpansionErrorQuery, ParseMacroExpansionQuery, RealSpanMapQuery, + ExpandProcMacroQuery, IncludeExpandQuery, InternMacroCallQuery, InternSyntaxContextQuery, + MacroArgQuery, ParseMacroExpansionErrorQuery, ParseMacroExpansionQuery, RealSpanMapQuery, }; pub use hir_ty::db::*; diff --git a/crates/ide-completion/src/tests/proc_macros.rs b/crates/ide-completion/src/tests/proc_macros.rs index e55d1f315fedf..2d6234e310c60 100644 --- a/crates/ide-completion/src/tests/proc_macros.rs +++ b/crates/ide-completion/src/tests/proc_macros.rs @@ -9,7 +9,6 @@ fn check(ra_fixture: &str, expect: Expect) { } #[test] -#[ignore] // todo fn complete_dot_in_attr() { check( r#" @@ -41,7 +40,6 @@ fn main() { } #[test] -#[ignore] // TODO fn complete_dot_in_attr2() { check( r#" diff --git a/crates/ide-db/src/apply_change.rs b/crates/ide-db/src/apply_change.rs index 2efe3ee1ed757..67e686dad11ef 100644 --- a/crates/ide-db/src/apply_change.rs +++ b/crates/ide-db/src/apply_change.rs @@ -101,8 +101,8 @@ impl RootDatabase { hir::db::ExpandProcMacroQuery hir::db::IncludeExpandQuery hir::db::InternMacroCallQuery + hir::db::InternSyntaxContextQuery hir::db::MacroArgQuery - hir::db::MacroExpandQuery hir::db::ParseMacroExpansionQuery hir::db::RealSpanMapQuery diff --git a/crates/ide-db/src/lib.rs b/crates/ide-db/src/lib.rs index 38c9a3538f3a7..cbd51e67810c3 100644 --- a/crates/ide-db/src/lib.rs +++ b/crates/ide-db/src/lib.rs @@ -157,7 +157,6 @@ impl RootDatabase { base_db::ParseQuery.in_db_mut(self).set_lru_capacity(lru_capacity); // macro expansions are usually rather small, so we can afford to keep more of them alive hir::db::ParseMacroExpansionQuery.in_db_mut(self).set_lru_capacity(4 * lru_capacity); - hir::db::MacroExpandQuery.in_db_mut(self).set_lru_capacity(4 * lru_capacity); } pub fn update_lru_capacities(&mut self, lru_capacities: &FxHashMap, usize>) { @@ -175,12 +174,6 @@ impl RootDatabase { .copied() .unwrap_or(4 * base_db::DEFAULT_PARSE_LRU_CAP), ); - hir_db::MacroExpandQuery.in_db_mut(self).set_lru_capacity( - lru_capacities - .get(stringify!(MacroExpandQuery)) - .copied() - .unwrap_or(4 * base_db::DEFAULT_PARSE_LRU_CAP), - ); macro_rules! 
update_lru_capacity_per_query { ($( $module:ident :: $query:ident )*) => {$( diff --git a/crates/proc-macro-api/src/msg/flat.rs b/crates/proc-macro-api/src/msg/flat.rs index baf8bbad4bf2b..43840fa333d78 100644 --- a/crates/proc-macro-api/src/msg/flat.rs +++ b/crates/proc-macro-api/src/msg/flat.rs @@ -43,7 +43,7 @@ use serde::{Deserialize, Serialize}; use crate::msg::ENCODE_CLOSE_SPAN_VERSION; -pub type SpanDataIndexMap = IndexSet; +type SpanDataIndexMap = IndexSet; #[derive(Clone, Copy, PartialEq, Eq, Hash)] pub struct TokenId(pub u32); @@ -54,12 +54,6 @@ impl std::fmt::Debug for TokenId { } } -impl TokenId { - pub const DEF_SITE: Self = TokenId(0); - pub const CALL_SITE: Self = TokenId(0); - pub const MIXED_SITE: Self = TokenId(0); -} - impl tt::Span for TokenId { const DUMMY: Self = TokenId(!0); diff --git a/crates/proc-macro-srv/src/lib.rs b/crates/proc-macro-srv/src/lib.rs index 32a07a84777bf..dd327681c4c02 100644 --- a/crates/proc-macro-srv/src/lib.rs +++ b/crates/proc-macro-srv/src/lib.rs @@ -31,9 +31,8 @@ use std::{ time::SystemTime, }; -use ::tt::Span; use proc_macro_api::{ - msg::{self, ExpnGlobals, TokenId, CURRENT_API_VERSION, HAS_GLOBAL_SPANS}, + msg::{self, ExpnGlobals, TokenId, CURRENT_API_VERSION}, ProcMacroKind, }; From f48fa0c6cba2d442523daced9db09576e4b11e78 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Wed, 29 Nov 2023 15:48:40 +0100 Subject: [PATCH 17/80] Re-implement syntax fixups --- crates/base-db/src/span.rs | 2 - crates/hir-expand/src/db.rs | 103 ++++-- crates/hir-expand/src/fixup.rs | 206 ++++++----- crates/hir-expand/src/lib.rs | 13 +- .../src/handlers/unresolved_module.rs | 2 +- crates/mbe/src/lib.rs | 2 +- crates/mbe/src/syntax_bridge.rs | 344 +++++++++++------- 7 files changed, 408 insertions(+), 264 deletions(-) diff --git a/crates/base-db/src/span.rs b/crates/base-db/src/span.rs index 420c669641cc4..dbccbb382a7e8 100644 --- a/crates/base-db/src/span.rs +++ b/crates/base-db/src/span.rs @@ -34,8 +34,6 @@ impl SyntaxContextId { // we need a special value that behaves as the current context. pub const SELF_REF: Self = SyntaxContextId(unsafe { InternId::new_unchecked(InternId::MAX - 1) }); - // Used for syntax fixups - pub const FAKE: Self = SyntaxContextId(unsafe { InternId::new_unchecked(InternId::MAX - 2) }); pub fn is_root(self) -> bool { self == Self::ROOT diff --git a/crates/hir-expand/src/db.rs b/crates/hir-expand/src/db.rs index 00a7b0cb3fef4..0135d97f1cd14 100644 --- a/crates/hir-expand/src/db.rs +++ b/crates/hir-expand/src/db.rs @@ -9,6 +9,7 @@ use base_db::{ use either::Either; use limit::Limit; use mbe::{syntax_node_to_token_tree, ValueResult}; +use rustc_hash::FxHashSet; use syntax::{ ast::{self, HasAttrs, HasDocComments}, AstNode, Parse, SyntaxError, SyntaxNode, SyntaxToken, T, @@ -20,6 +21,7 @@ use crate::{ attrs::RawAttrs, builtin_attr_macro::pseudo_derive_attr_expansion, builtin_fn_macro::EagerExpander, + fixup::{self, SyntaxFixupUndoInfo}, hygiene::{self, SyntaxContextData, Transparency}, span::{RealSpanMap, SpanMap, SpanMapRef}, tt, AstId, BuiltinAttrExpander, BuiltinDeriveExpander, BuiltinFnLikeExpander, EagerCallInfo, @@ -135,7 +137,7 @@ pub trait ExpandDatabase: SourceDatabase { fn macro_arg( &self, id: MacroCallId, - ) -> ValueResult>, Arc>>; + ) -> ValueResult, SyntaxFixupUndoInfo)>, Arc>>; /// Fetches the expander for this macro. 
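     /// (It is `#[salsa::transparent]`, i.e. a plain function rather than a
     /// memoized query, so calls are recomputed rather than cached.)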
#[salsa::transparent] fn macro_expander(&self, id: MacroDefId) -> TokenExpander; @@ -189,15 +191,33 @@ pub fn expand_speculative( ) -> Option<(SyntaxNode, SyntaxToken)> { let loc = db.lookup_intern_macro_call(actual_macro_call); - // Build the subtree and token mapping for the speculative args - let _censor = censor_for_macro_input(&loc, speculative_args); let span_map = RealSpanMap::absolute(SpanAnchor::DUMMY.file_id); let span_map = SpanMapRef::RealSpanMap(&span_map); - let mut tt = mbe::syntax_node_to_token_tree( - speculative_args, - // we don't leak these spans into any query so its fine to make them absolute - span_map, - ); + + // Build the subtree and token mapping for the speculative args + let (mut tt, undo_info) = match loc.kind { + MacroCallKind::FnLike { .. } => { + (mbe::syntax_node_to_token_tree(speculative_args, span_map), SyntaxFixupUndoInfo::NONE) + } + MacroCallKind::Derive { .. } | MacroCallKind::Attr { .. } => { + let censor = censor_for_macro_input(&loc, speculative_args); + let mut fixups = fixup::fixup_syntax(span_map, speculative_args); + fixups.append.retain(|it, _| match it { + syntax::NodeOrToken::Node(it) => !censor.contains(it), + syntax::NodeOrToken::Token(_) => true, + }); + fixups.remove.extend(censor); + ( + mbe::syntax_node_to_token_tree_modified( + speculative_args, + span_map, + fixups.append, + fixups.remove, + ), + fixups.undo_info, + ) + } + }; let attr_arg = match loc.kind { MacroCallKind::Attr { invoc_attr_index, .. } => { @@ -227,7 +247,7 @@ pub fn expand_speculative( // Do the actual expansion, we need to directly expand the proc macro due to the attribute args // Otherwise the expand query will fetch the non speculative attribute args and pass those instead. - let speculative_expansion = match loc.def.kind { + let mut speculative_expansion = match loc.def.kind { MacroDefKind::ProcMacro(expander, ..) => { tt.delimiter = tt::Delimiter::UNSPECIFIED; let call_site = loc.span(db); @@ -261,6 +281,7 @@ pub fn expand_speculative( }; let expand_to = macro_expand_to(db, actual_macro_call); + fixup::reverse_fixups(&mut speculative_expansion.value, &undo_info); let (node, rev_tmap) = token_tree_to_syntax_node(&speculative_expansion.value, expand_to); let syntax_node = node.syntax_node(); @@ -347,7 +368,9 @@ fn parse_with_map(db: &dyn ExpandDatabase, file_id: HirFileId) -> (Parse ValueResult>, Arc>> { + // FIXME: consider the following by putting fixup info into eager call info args + // ) -> ValueResult>, Arc>> { +) -> ValueResult, SyntaxFixupUndoInfo)>, Arc>> { let mismatched_delimiters = |arg: &SyntaxNode| { let first = arg.first_child_or_token().map_or(T![.], |it| it.kind()); let last = arg.last_child_or_token().map_or(T![.], |it| it.kind()); @@ -375,7 +398,7 @@ fn macro_arg( .then(|| loc.eager.as_deref()) .flatten() { - ValueResult::ok(Some(arg.clone())) + ValueResult::ok(Some((arg.clone(), SyntaxFixupUndoInfo::NONE))) } else { let (parse, map) = parse_with_map(db, loc.kind.file_id()); let root = parse.syntax_node(); @@ -404,22 +427,27 @@ fn macro_arg( } MacroCallKind::Attr { ast_id, .. } => ast_id.to_ptr(db).to_node(&root).syntax().clone(), }; - let censor = censor_for_macro_input(&loc, &syntax); - let mut tt = match loc.kind { + let (mut tt, undo_info) = match loc.kind { MacroCallKind::FnLike { .. } => { - mbe::syntax_node_to_token_tree_censored(&syntax, map.as_ref(), censor) + (mbe::syntax_node_to_token_tree(&syntax, map.as_ref()), SyntaxFixupUndoInfo::NONE) } MacroCallKind::Derive { .. } | MacroCallKind::Attr { .. 
} => { - // let mut fixups = crate::fixup::fixup_syntax(&syntax); - // fixups.replace.extend(censor.into_iter().map(|node| (node.into(), Vec::new()))); - // let (mut tt, tmap, _) = mbe::syntax_node_to_token_tree_with_modifications( - // &node, - // fixups.token_map, - // fixups.next_id, - // fixups.replace, - // fixups.append, - // ); - mbe::syntax_node_to_token_tree_censored(&syntax, map.as_ref(), censor) + let censor = censor_for_macro_input(&loc, &syntax); + let mut fixups = fixup::fixup_syntax(map.as_ref(), &syntax); + fixups.append.retain(|it, _| match it { + syntax::NodeOrToken::Node(it) => !censor.contains(it), + syntax::NodeOrToken::Token(_) => true, + }); + fixups.remove.extend(censor); + ( + mbe::syntax_node_to_token_tree_modified( + &syntax, + map, + fixups.append, + fixups.remove, + ), + fixups.undo_info, + ) } }; @@ -430,15 +458,15 @@ fn macro_arg( if matches!(loc.def.kind, MacroDefKind::BuiltInEager(..)) { match parse.errors() { - [] => ValueResult::ok(Some(Arc::new(tt))), + [] => ValueResult::ok(Some((Arc::new(tt), undo_info))), errors => ValueResult::new( - Some(Arc::new(tt)), + Some((Arc::new(tt), undo_info)), // Box::<[_]>::from(res.errors()), not stable yet Arc::new(errors.to_vec().into_boxed_slice()), ), } } else { - ValueResult::ok(Some(Arc::new(tt))) + ValueResult::ok(Some((Arc::new(tt), undo_info))) } } } @@ -447,7 +475,7 @@ fn macro_arg( /// Certain macro calls expect some nodes in the input to be preprocessed away, namely: /// - derives expect all `#[derive(..)]` invocations up to the currently invoked one to be stripped /// - attributes expect the invoking attribute to be stripped -fn censor_for_macro_input(loc: &MacroCallLoc, node: &SyntaxNode) -> Vec { +fn censor_for_macro_input(loc: &MacroCallLoc, node: &SyntaxNode) -> FxHashSet { // FIXME: handle `cfg_attr` (|| { let censor = match loc.kind { @@ -574,13 +602,13 @@ fn macro_expand( let MacroCallKind::Derive { ast_id, .. } = loc.kind else { unreachable!() }; let node = ast_id.to_ptr(db).to_node(&root); - // FIXME: we might need to remove the spans from the input to the derive macro here + // FIXME: Use censoring let _censor = censor_for_macro_input(&loc, node.syntax()); expander.expand(db, macro_call_id, &node, map.as_ref()) } _ => { let ValueResult { value, err } = db.macro_arg(macro_call_id); - let Some(macro_arg) = value else { + let Some((macro_arg, undo_info)) = value else { return ExpandResult { value: Arc::new(tt::Subtree { delimiter: tt::Delimiter::UNSPECIFIED, @@ -608,7 +636,7 @@ fn macro_expand( // As such we just return the input subtree here. MacroDefKind::BuiltInEager(..) 
if loc.eager.is_none() => { return ExpandResult { - value: Arc::new(arg.clone()), + value: macro_arg.clone(), err: err.map(|err| { let mut buf = String::new(); for err in &**err { @@ -624,7 +652,11 @@ fn macro_expand( MacroDefKind::BuiltInEager(it, _) => { it.expand(db, macro_call_id, &arg).map_err(Into::into) } - MacroDefKind::BuiltInAttr(it, _) => it.expand(db, macro_call_id, &arg), + MacroDefKind::BuiltInAttr(it, _) => { + let mut res = it.expand(db, macro_call_id, &arg); + fixup::reverse_fixups(&mut res.value, &undo_info); + res + } _ => unreachable!(), } } @@ -647,9 +679,8 @@ fn macro_expand( } fn expand_proc_macro(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult> { - // FIXME: Syntax fix ups let loc = db.lookup_intern_macro_call(id); - let Some(macro_arg) = db.macro_arg(id).value else { + let Some((macro_arg, undo_info)) = db.macro_arg(id).value else { return ExpandResult { value: Arc::new(tt::Subtree { delimiter: tt::Delimiter::UNSPECIFIED, @@ -672,7 +703,7 @@ fn expand_proc_macro(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult ExpandResult>, - pub(crate) replace: FxHashMap>, + pub(crate) remove: FxHashSet, pub(crate) undo_info: SyntaxFixupUndoInfo, } /// This is the information needed to reverse the fixups. -#[derive(Debug, Default, PartialEq, Eq)] +#[derive(Clone, Debug, Default, PartialEq, Eq)] pub struct SyntaxFixupUndoInfo { - original: Box<[Subtree]>, + // FIXME: ThinArc<[Subtree]> + original: Option>>, +} + +impl SyntaxFixupUndoInfo { + pub(crate) const NONE: Self = SyntaxFixupUndoInfo { original: None }; } // censoring -> just don't convert the node @@ -39,47 +46,45 @@ pub struct SyntaxFixupUndoInfo { // append -> insert a fake node, here we need to assemble some dummy span that we can figure out how // to remove later -pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups { +pub(crate) fn fixup_syntax(span_map: SpanMapRef<'_>, node: &SyntaxNode) -> SyntaxFixups { let mut append = FxHashMap::::default(); - let mut replace = FxHashMap::::default(); + let mut remove = FxHashSet::::default(); let mut preorder = node.preorder(); let mut original = Vec::new(); let dummy_range = TextRange::empty(TextSize::new(0)); + // we use a file id of `FileId(!0)` to signal a fake node, and the text range's start offset as + // the index into the replacement vec but only if the end points to !0 let dummy_anchor = - SpanAnchor { file_id: FileId(!0), ast_id: ErasedFileAstId::from_raw(RawIdx::from(0)) }; - let fake_span = - SpanData { range: dummy_range, anchor: dummy_anchor, ctx: SyntaxContextId::FAKE }; + SpanAnchor { file_id: FileId(!0), ast_id: ErasedFileAstId::from_raw(RawIdx::from(!0)) }; + let fake_span = |range| SpanData { + range: dummy_range, + anchor: dummy_anchor, + ctx: span_map.span_for_range(range).ctx, + }; while let Some(event) = preorder.next() { let syntax::WalkEvent::Enter(node) = event else { continue }; - /* + let node_range = node.text_range(); if can_handle_error(&node) && has_error_to_handle(&node) { + remove.insert(node.clone().into()); // the node contains an error node, we have to completely replace it by something valid - let (original_tree, new_tmap, new_next_id) = - mbe::syntax_node_to_token_tree_with_modifications( - &node, - mem::take(&mut token_map), - next_id, - Default::default(), - Default::default(), - ); - token_map = new_tmap; - next_id = new_next_id; + let original_tree = mbe::syntax_node_to_token_tree(&node, span_map); let idx = original.len() as u32; original.push(original_tree); - let replacement = SyntheticToken { - kind: 
SyntaxKind::IDENT, + let replacement = Leaf::Ident(Ident { text: "__ra_fixup".into(), - range: node.text_range(), - id: SyntheticTokenId(idx), - }; - replace.insert(node.clone().into(), vec![replacement]); + span: SpanData { + range: TextRange::new(TextSize::new(idx), TextSize::new(!0)), + anchor: dummy_anchor, + ctx: span_map.span_for_range(node_range).ctx, + }, + }); + append.insert(node.clone().into(), vec![replacement]); preorder.skip_subtree(); continue; } - */ + // In some other situations, we can fix things by just appending some tokens. - let end_range = TextRange::empty(node.text_range().end()); match_ast! { match node { ast::FieldExpr(it) => { @@ -88,7 +93,7 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups { append.insert(node.clone().into(), vec![ Leaf::Ident(Ident { text: "__ra_fixup".into(), - span: fake_span + span: fake_span(node_range), }), ]); } @@ -99,7 +104,7 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups { Leaf::Punct(Punct { char: ';', spacing: Spacing::Alone, - span: fake_span + span: fake_span(node_range), }), ]); } @@ -110,7 +115,7 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups { Leaf::Punct(Punct { char: ';', spacing: Spacing::Alone, - span: fake_span + span: fake_span(node_range) }), ]); } @@ -125,7 +130,7 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups { append.insert(if_token.into(), vec![ Leaf::Ident(Ident { text: "__ra_fixup".into(), - span: fake_span + span: fake_span(node_range) }), ]); } @@ -135,12 +140,12 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups { Leaf::Punct(Punct { char: '{', spacing: Spacing::Alone, - span: fake_span + span: fake_span(node_range) }), Leaf::Punct(Punct { char: '}', spacing: Spacing::Alone, - span: fake_span + span: fake_span(node_range) }), ]); } @@ -155,7 +160,7 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups { append.insert(while_token.into(), vec![ Leaf::Ident(Ident { text: "__ra_fixup".into(), - span: fake_span + span: fake_span(node_range) }), ]); } @@ -165,12 +170,12 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups { Leaf::Punct(Punct { char: '{', spacing: Spacing::Alone, - span: fake_span + span: fake_span(node_range) }), Leaf::Punct(Punct { char: '}', spacing: Spacing::Alone, - span: fake_span + span: fake_span(node_range) }), ]); } @@ -182,12 +187,12 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups { Leaf::Punct(Punct { char: '{', spacing: Spacing::Alone, - span: fake_span + span: fake_span(node_range) }), Leaf::Punct(Punct { char: '}', spacing: Spacing::Alone, - span: fake_span + span: fake_span(node_range) }), ]); } @@ -202,7 +207,7 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups { append.insert(match_token.into(), vec![ Leaf::Ident(Ident { text: "__ra_fixup".into(), - span: fake_span + span: fake_span(node_range) }), ]); } @@ -213,12 +218,12 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups { Leaf::Punct(Punct { char: '{', spacing: Spacing::Alone, - span: fake_span + span: fake_span(node_range) }), Leaf::Punct(Punct { char: '}', spacing: Spacing::Alone, - span: fake_span + span: fake_span(node_range) }), ]); } @@ -236,7 +241,7 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups { ].map(|text| Leaf::Ident(Ident { text: text.into(), - span: fake_span + span: fake_span(node_range) }), ); @@ -253,12 +258,12 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups { Leaf::Punct(Punct { char: '{', spacing: Spacing::Alone, - span: fake_span 
+ span: fake_span(node_range) }), Leaf::Punct(Punct { char: '}', spacing: Spacing::Alone, - span: fake_span + span: fake_span(node_range) }), ]); } @@ -267,10 +272,13 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups { } } } + let needs_fixups = !append.is_empty() || !original.is_empty(); SyntaxFixups { append, - replace, - undo_info: SyntaxFixupUndoInfo { original: original.into_boxed_slice() }, + remove, + undo_info: SyntaxFixupUndoInfo { + original: needs_fixups.then(|| Arc::new(original.into_boxed_slice())), + }, } } @@ -287,42 +295,55 @@ fn has_error_to_handle(node: &SyntaxNode) -> bool { } pub(crate) fn reverse_fixups(tt: &mut Subtree, undo_info: &SyntaxFixupUndoInfo) { + let Some(undo_info) = undo_info.original.as_deref() else { return }; + let undo_info = &**undo_info; + reverse_fixups_(tt, undo_info); +} + +fn reverse_fixups_(tt: &mut Subtree, undo_info: &[Subtree]) { let tts = std::mem::take(&mut tt.token_trees); tt.token_trees = tts .into_iter() // delete all fake nodes .filter(|tt| match tt { - tt::TokenTree::Leaf(leaf) => leaf.span().ctx != SyntaxContextId::FAKE, - tt::TokenTree::Subtree(st) => st.delimiter.open.ctx != SyntaxContextId::FAKE, + tt::TokenTree::Leaf(leaf) => { + let span = leaf.span(); + span.anchor.file_id != FileId(!0) || span.range.end() == TextSize::new(!0) + } + tt::TokenTree::Subtree(_) => true, + }) + .flat_map(|tt| match tt { + tt::TokenTree::Subtree(mut tt) => { + reverse_fixups_(&mut tt, undo_info); + SmallVec::from_const([tt.into()]) + } + tt::TokenTree::Leaf(leaf) => { + if leaf.span().anchor.file_id == FileId(!0) { + let original = undo_info[u32::from(leaf.span().range.start()) as usize].clone(); + if original.delimiter.kind == tt::DelimiterKind::Invisible { + original.token_trees.into() + } else { + SmallVec::from_const([original.into()]) + } + } else { + SmallVec::from_const([leaf.into()]) + } + } }) - // .flat_map(|tt| match tt { - // tt::TokenTree::Subtree(mut tt) => { - // reverse_fixups(&mut tt, undo_info); - // SmallVec::from_const([tt.into()]) - // } - // tt::TokenTree::Leaf(leaf) => { - // if let Some(id) = leaf.span().anchor { - // let original = undo_info.original[id.0 as usize].clone(); - // if original.delimiter.kind == tt::DelimiterKind::Invisible { - // original.token_trees.into() - // } else { - // SmallVec::from_const([original.into()]) - // } - // } else { - // SmallVec::from_const([leaf.into()]) - // } - // } - // }) .collect(); } #[cfg(test)] mod tests { + use base_db::FileId; use expect_test::{expect, Expect}; + use triomphe::Arc; - use crate::tt; - - use super::reverse_fixups; + use crate::{ + fixup::reverse_fixups, + span::{RealSpanMap, SpanMap}, + tt, + }; // The following three functions are only meant to check partial structural equivalence of // `TokenTree`s, see the last assertion in `check()`. 
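Everything `reverse_fixups` needs is encoded in the spans themselves: an anchor file id of `!0` marks a synthesized token, and a range end of `!0` additionally marks a `__ra_fixup` ident that replaced a whole erroneous subtree, with the index into the saved `original` table carried in the range start. A runnable sketch of that classification, with spans reduced to plain `u32`s (the real code reads the same fields off `SpanData`):

const FAKE_FILE: u32 = !0;
const REPLACEMENT_MARKER: u32 = !0;

#[derive(Clone, Copy)]
struct Span {
    file_id: u32,
    start: u32,
    end: u32,
}

enum Undo<'a> {
    Keep,             // real token: leave untouched
    Delete,           // synthetic helper token: reverse_fixups drops it
    Restore(&'a str), // replacement ident: substitute the saved subtree
}

fn classify<'a>(span: Span, original: &'a [&'a str]) -> Undo<'a> {
    if span.file_id != FAKE_FILE {
        Undo::Keep
    } else if span.end == REPLACEMENT_MARKER {
        // the range start smuggles the index into the undo table
        Undo::Restore(original[span.start as usize])
    } else {
        Undo::Delete
    }
}

fn main() {
    let original = ["match x { }"];
    let replacement = Span { file_id: FAKE_FILE, start: 0, end: REPLACEMENT_MARKER };
    assert!(matches!(classify(replacement, &original), Undo::Restore("match x { }")));
    let helper = Span { file_id: FAKE_FILE, start: 0, end: 0 };
    assert!(matches!(classify(helper, &original), Undo::Delete));
}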
@@ -352,13 +373,13 @@ mod tests { #[track_caller] fn check(ra_fixture: &str, mut expect: Expect) { let parsed = syntax::SourceFile::parse(ra_fixture); - let fixups = super::fixup_syntax(&parsed.syntax_node()); - let (mut tt, tmap, _) = mbe::syntax_node_to_token_tree_with_modifications( + let span_map = SpanMap::RealSpanMap(Arc::new(RealSpanMap::absolute(FileId(0)))); + let fixups = super::fixup_syntax(span_map.as_ref(), &parsed.syntax_node()); + let mut tt = mbe::syntax_node_to_token_tree_modified( &parsed.syntax_node(), - fixups.token_map, - fixups.next_id, - fixups.replace, + span_map.as_ref(), fixups.append, + fixups.remove, ); let actual = format!("{tt}\n"); @@ -374,14 +395,15 @@ mod tests { parse.syntax_node() ); - reverse_fixups(&mut tt, &tmap, &fixups.undo_info); + reverse_fixups(&mut tt, &fixups.undo_info); // the fixed-up + reversed version should be equivalent to the original input // modulo token IDs and `Punct`s' spacing. - let (original_as_tt, _) = mbe::syntax_node_to_token_tree(&parsed.syntax_node()); + let original_as_tt = + mbe::syntax_node_to_token_tree(&parsed.syntax_node(), span_map.as_ref()); assert!( check_subtree_eq(&tt, &original_as_tt), - "different token tree: {tt:?},\n{original_as_tt:?}" + "different token tree:\n{tt:?}\n\n{original_as_tt:?}" ); } @@ -394,7 +416,7 @@ fn foo() { } "#, expect![[r#" -fn foo () {for _ in __ra_fixup {}} +fn foo () {for _ in __ra_fixup { }} "#]], ) } @@ -422,7 +444,7 @@ fn foo() { } "#, expect![[r#" -fn foo () {for bar in qux {}} +fn foo () {for bar in qux { }} "#]], ) } @@ -453,7 +475,7 @@ fn foo() { } "#, expect![[r#" -fn foo () {match __ra_fixup {}} +fn foo () {match __ra_fixup { }} "#]], ) } @@ -485,7 +507,7 @@ fn foo() { } "#, expect![[r#" -fn foo () {match __ra_fixup {}} +fn foo () {match __ra_fixup { }} "#]], ) } @@ -600,7 +622,7 @@ fn foo() { } "#, expect![[r#" -fn foo () {if a {}} +fn foo () {if a { }} "#]], ) } @@ -614,7 +636,7 @@ fn foo() { } "#, expect![[r#" -fn foo () {if __ra_fixup {}} +fn foo () {if __ra_fixup { }} "#]], ) } @@ -628,7 +650,7 @@ fn foo() { } "#, expect![[r#" -fn foo () {if __ra_fixup {} {}} +fn foo () {if __ra_fixup {} { }} "#]], ) } @@ -642,7 +664,7 @@ fn foo() { } "#, expect![[r#" -fn foo () {while __ra_fixup {}} +fn foo () {while __ra_fixup { }} "#]], ) } @@ -656,7 +678,7 @@ fn foo() { } "#, expect![[r#" -fn foo () {while foo {}} +fn foo () {while foo { }} "#]], ) } @@ -683,7 +705,7 @@ fn foo() { } "#, expect![[r#" -fn foo () {loop {}} +fn foo () {loop { }} "#]], ) } diff --git a/crates/hir-expand/src/lib.rs b/crates/hir-expand/src/lib.rs index 9027ea1c27b45..491c5e638ee15 100644 --- a/crates/hir-expand/src/lib.rs +++ b/crates/hir-expand/src/lib.rs @@ -20,7 +20,7 @@ pub mod mod_path; pub mod attrs; pub mod span; pub mod files; -// mod fixup; +mod fixup; use triomphe::Arc; @@ -42,6 +42,7 @@ use crate::{ builtin_derive_macro::BuiltinDeriveExpander, builtin_fn_macro::{BuiltinFnLikeExpander, EagerExpander}, db::TokenExpander, + fixup::SyntaxFixupUndoInfo, mod_path::ModPath, proc_macro::ProcMacroExpander, span::{ExpansionSpanMap, SpanMap}, @@ -695,8 +696,14 @@ impl ExpansionInfo { let (parse, exp_map) = db.parse_macro_expansion(macro_file).value; let expanded = InMacroFile { file_id: macro_file, value: parse.syntax_node() }; - let macro_arg = db.macro_arg(macro_file.macro_call_id).value.unwrap_or_else(|| { - Arc::new(tt::Subtree { delimiter: tt::Delimiter::UNSPECIFIED, token_trees: Vec::new() }) + let (macro_arg, _) = db.macro_arg(macro_file.macro_call_id).value.unwrap_or_else(|| { + ( + Arc::new(tt::Subtree 
{ + delimiter: tt::Delimiter::UNSPECIFIED, + token_trees: Vec::new(), + }), + SyntaxFixupUndoInfo::NONE, + ) }); let def = loc.def.ast_id().left().and_then(|id| { diff --git a/crates/ide-diagnostics/src/handlers/unresolved_module.rs b/crates/ide-diagnostics/src/handlers/unresolved_module.rs index 234ca2a7671bd..4f81ec0512589 100644 --- a/crates/ide-diagnostics/src/handlers/unresolved_module.rs +++ b/crates/ide-diagnostics/src/handlers/unresolved_module.rs @@ -150,7 +150,7 @@ mod baz {} ], ), main_node: Some( - InFile { + InFileWrapper { file_id: FileId( 0, ), diff --git a/crates/mbe/src/lib.rs b/crates/mbe/src/lib.rs index fdf97ad538cbd..c19112b4c5404 100644 --- a/crates/mbe/src/lib.rs +++ b/crates/mbe/src/lib.rs @@ -36,7 +36,7 @@ pub use crate::{ syntax_bridge::{ map_from_syntax_node, parse_exprs_with_sep, parse_to_token_tree, parse_to_token_tree_static_span, syntax_node_to_token_tree, - syntax_node_to_token_tree_censored, token_tree_to_syntax_node, SpanMapper, + syntax_node_to_token_tree_modified, token_tree_to_syntax_node, SpanMapper, }, token_map::TokenMap, }; diff --git a/crates/mbe/src/syntax_bridge.rs b/crates/mbe/src/syntax_bridge.rs index c61c52628607e..be5eafd014aff 100644 --- a/crates/mbe/src/syntax_bridge.rs +++ b/crates/mbe/src/syntax_bridge.rs @@ -1,6 +1,7 @@ //! Conversions between [`SyntaxNode`] and [`tt::TokenTree`]. -use stdx::non_empty_vec::NonEmptyVec; +use rustc_hash::{FxHashMap, FxHashSet}; +use stdx::{never, non_empty_vec::NonEmptyVec}; use syntax::{ ast::{self, make::tokens::doc_comment}, AstToken, NodeOrToken, Parse, PreorderWithTokens, SmolStr, SyntaxElement, SyntaxKind, @@ -74,14 +75,15 @@ where Ctx: SyntaxContext, SpanMap: SpanMapper>, { - let mut c = Converter::new(node, vec![], map); + let mut c = Converter::new(node, map, Default::default(), Default::default()); convert_tokens(&mut c) } -pub fn syntax_node_to_token_tree_censored( +pub fn syntax_node_to_token_tree_modified( node: &SyntaxNode, map: SpanMap, - censored: Vec, + append: FxHashMap>>>, + remove: FxHashSet, ) -> tt::Subtree> where SpanMap: SpanMapper>, @@ -89,7 +91,7 @@ where Anchor: Copy, Ctx: SyntaxContext, { - let mut c = Converter::new(node, censored, map); + let mut c = Converter::new(node, map, append, remove); convert_tokens(&mut c) } @@ -237,102 +239,105 @@ where while let Some((token, abs_range)) = conv.bump() { let tt::Subtree { delimiter, token_trees: result } = stack.last_mut(); - let kind = token.kind(conv); - - let tt = match kind { - // Desugar doc comments into doc attributes - COMMENT => { - let span = conv.span_for(abs_range); - if let Some(tokens) = conv.convert_doc_comment(&token, span) { - result.extend(tokens); - } - continue; - } - _ if kind.is_punct() && kind != UNDERSCORE => { - let expected = match delimiter.kind { - tt::DelimiterKind::Parenthesis => Some(T![')']), - tt::DelimiterKind::Brace => Some(T!['}']), - tt::DelimiterKind::Bracket => Some(T![']']), - tt::DelimiterKind::Invisible => None, - }; - - // Current token is a closing delimiter that we expect, fix up the closing span - // and end the subtree here - if matches!(expected, Some(expected) if expected == kind) { - if let Some(mut subtree) = stack.pop() { - subtree.delimiter.close = conv.span_for(abs_range); - stack.last_mut().token_trees.push(subtree.into()); + let tt = match token.as_leaf() { + Some(leaf) => tt::TokenTree::Leaf(leaf.clone()), + None => match token.kind(conv) { + // Desugar doc comments into doc attributes + COMMENT => { + let span = conv.span_for(abs_range); + if let Some(tokens) = 
conv.convert_doc_comment(&token, span) { + result.extend(tokens); } continue; } + kind if kind.is_punct() && kind != UNDERSCORE => { + let expected = match delimiter.kind { + tt::DelimiterKind::Parenthesis => Some(T![')']), + tt::DelimiterKind::Brace => Some(T!['}']), + tt::DelimiterKind::Bracket => Some(T![']']), + tt::DelimiterKind::Invisible => None, + }; - let delim = match kind { - T!['('] => Some(tt::DelimiterKind::Parenthesis), - T!['{'] => Some(tt::DelimiterKind::Brace), - T!['['] => Some(tt::DelimiterKind::Bracket), - _ => None, - }; - - // Start a new subtree - if let Some(kind) = delim { - let open = conv.span_for(abs_range); - stack.push(tt::Subtree { - delimiter: tt::Delimiter { - open, - // will be overwritten on subtree close above - close: open, - kind, - }, - token_trees: vec![], - }); - continue; - } + // Current token is a closing delimiter that we expect, fix up the closing span + // and end the subtree here + if matches!(expected, Some(expected) if expected == kind) { + if let Some(mut subtree) = stack.pop() { + subtree.delimiter.close = conv.span_for(abs_range); + stack.last_mut().token_trees.push(subtree.into()); + } + continue; + } - let spacing = match conv.peek().map(|next| next.kind(conv)) { - Some(kind) if is_single_token_op(kind) => tt::Spacing::Joint, - _ => tt::Spacing::Alone, - }; - let Some(char) = token.to_char(conv) else { - panic!("Token from lexer must be single char: token = {token:#?}") - }; - tt::Leaf::from(tt::Punct { char, spacing, span: conv.span_for(abs_range) }).into() - } - _ => { - macro_rules! make_leaf { - ($i:ident) => { - tt::$i { span: conv.span_for(abs_range), text: token.to_text(conv) }.into() + let delim = match kind { + T!['('] => Some(tt::DelimiterKind::Parenthesis), + T!['{'] => Some(tt::DelimiterKind::Brace), + T!['['] => Some(tt::DelimiterKind::Bracket), + _ => None, }; - } - let leaf: tt::Leaf<_> = match kind { - T![true] | T![false] => make_leaf!(Ident), - IDENT => make_leaf!(Ident), - UNDERSCORE => make_leaf!(Ident), - k if k.is_keyword() => make_leaf!(Ident), - k if k.is_literal() => make_leaf!(Literal), - LIFETIME_IDENT => { - let apostrophe = tt::Leaf::from(tt::Punct { - char: '\'', - spacing: tt::Spacing::Joint, - span: conv - .span_for(TextRange::at(abs_range.start(), TextSize::of('\''))), - }); - result.push(apostrophe.into()); - - let ident = tt::Leaf::from(tt::Ident { - text: SmolStr::new(&token.to_text(conv)[1..]), - span: conv.span_for(TextRange::at( - abs_range.start() + TextSize::of('\''), - abs_range.end(), - )), + + // Start a new subtree + if let Some(kind) = delim { + let open = conv.span_for(abs_range); + stack.push(tt::Subtree { + delimiter: tt::Delimiter { + open, + // will be overwritten on subtree close above + close: open, + kind, + }, + token_trees: vec![], }); - result.push(ident.into()); continue; } - _ => continue, - }; - leaf.into() - } + let spacing = match conv.peek().map(|next| next.kind(conv)) { + Some(kind) if is_single_token_op(kind) => tt::Spacing::Joint, + _ => tt::Spacing::Alone, + }; + let Some(char) = token.to_char(conv) else { + panic!("Token from lexer must be single char: token = {token:#?}") + }; + tt::Leaf::from(tt::Punct { char, spacing, span: conv.span_for(abs_range) }) + .into() + } + kind => { + macro_rules! 
make_leaf { + ($i:ident) => { + tt::$i { span: conv.span_for(abs_range), text: token.to_text(conv) } + .into() + }; + } + let leaf: tt::Leaf<_> = match kind { + T![true] | T![false] => make_leaf!(Ident), + IDENT => make_leaf!(Ident), + UNDERSCORE => make_leaf!(Ident), + k if k.is_keyword() => make_leaf!(Ident), + k if k.is_literal() => make_leaf!(Literal), + LIFETIME_IDENT => { + let apostrophe = tt::Leaf::from(tt::Punct { + char: '\'', + spacing: tt::Spacing::Joint, + span: conv + .span_for(TextRange::at(abs_range.start(), TextSize::of('\''))), + }); + result.push(apostrophe.into()); + + let ident = tt::Leaf::from(tt::Ident { + text: SmolStr::new(&token.to_text(conv)[1..]), + span: conv.span_for(TextRange::at( + abs_range.start() + TextSize::of('\''), + abs_range.end(), + )), + }); + result.push(ident.into()); + continue; + } + _ => continue, + }; + + leaf.into() + } + }, }; result.push(tt); @@ -470,16 +475,20 @@ struct StaticRawConverter<'a, S> { span: S, } -trait SrcToken: std::fmt::Debug { +trait SrcToken: std::fmt::Debug { fn kind(&self, ctx: &Ctx) -> SyntaxKind; fn to_char(&self, ctx: &Ctx) -> Option; fn to_text(&self, ctx: &Ctx) -> SmolStr; + + fn as_leaf(&self) -> Option<&tt::Leaf> { + None + } } trait TokenConverter: Sized { - type Token: SrcToken; + type Token: SrcToken; fn convert_doc_comment(&self, token: &Self::Token, span: S) -> Option>>; @@ -490,7 +499,7 @@ trait TokenConverter: Sized { fn span_for(&self, range: TextRange) -> S; } -impl SrcToken> for usize { +impl SrcToken, S> for usize { fn kind(&self, ctx: &RawConverter<'_, Anchor>) -> SyntaxKind { ctx.lexed.kind(*self) } @@ -504,7 +513,7 @@ impl SrcToken> for usize { } } -impl SrcToken> for usize { +impl SrcToken, S> for usize { fn kind(&self, ctx: &StaticRawConverter<'_, S>) -> SyntaxKind { ctx.lexed.kind(*self) } @@ -593,32 +602,79 @@ where } } -struct Converter { +struct Converter { current: Option, + current_leafs: Vec>, preorder: PreorderWithTokens, range: TextRange, punct_offset: Option<(SyntaxToken, TextSize)>, /// Used to make the emitted text ranges in the spans relative to the span anchor. 
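The rewritten `Converter` drives the conversion from the two tables introduced above: nodes in `remove` are skipped wholesale, while leaves queued in `append` are spliced into the token stream in their place. A flattened, runnable sketch of that flow, with nodes and tokens reduced to strings (the real walker also queues appends behind individual tokens and reverses them onto `current_leafs`):

use std::collections::{HashMap, HashSet};

fn convert(
    nodes: &[(&str, Vec<&str>)],
    remove: &HashSet<&str>,
    append: &mut HashMap<&str, Vec<&str>>,
) -> Vec<String> {
    let mut out = Vec::new();
    for (node, tokens) in nodes {
        if remove.contains(node) {
            // censored node: its own tokens never reach the token tree...
            if let Some(extra) = append.remove(node) {
                // ...but synthesized leaves registered for it are emitted instead
                out.extend(extra.iter().map(|t| t.to_string()));
            }
            continue;
        }
        out.extend(tokens.iter().map(|t| t.to_string()));
        if let Some(extra) = append.remove(node) {
            out.extend(extra.iter().map(|t| t.to_string()));
        }
    }
    out
}

fn main() {
    let nodes = [("fn_body", vec!["match", "x"]), ("error_node", vec!["#!@"])];
    let remove: HashSet<&str> = ["error_node"].into();
    let mut append: HashMap<&str, Vec<&str>> = [("error_node", vec!["__ra_fixup"])].into();
    assert_eq!(convert(&nodes, &remove, &mut append), ["match", "x", "__ra_fixup"]);
}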
map: SpanMap, - censored: Vec, -} - -impl Converter { - fn new(node: &SyntaxNode, censored: Vec, map: SpanMap) -> Self { - let range = node.text_range(); - let mut preorder = node.preorder_with_tokens(); - let first = Self::next_token(&mut preorder, &censored); - Converter { current: first, preorder, range, punct_offset: None, censored, map } - } - - fn next_token(preorder: &mut PreorderWithTokens, censor: &[SyntaxNode]) -> Option { - while let Some(ev) = preorder.next() { + append: FxHashMap>>, + remove: FxHashSet, +} + +impl Converter { + fn new( + node: &SyntaxNode, + map: SpanMap, + append: FxHashMap>>, + remove: FxHashSet, + ) -> Self { + let mut this = Converter { + current: None, + preorder: node.preorder_with_tokens(), + range: node.text_range(), + punct_offset: None, + map, + append, + remove, + current_leafs: vec![], + }; + let first = this.next_token(); + this.current = first; + this + } + + fn next_token(&mut self) -> Option { + // while let Some(ev) = self.preorder.next() { + // match ev { + // WalkEvent::Enter(SyntaxElement::Token(t)) => { + // if let Some(leafs) = self.append.remove(&t.clone().into()) { + // self.current_leafs.extend(leafs); + // } + // return Some(t); + // } + // WalkEvent::Enter(SyntaxElement::Node(n)) if self.remove.contains(&n) => { + // self.preorder.skip_subtree(); + // if let Some(leafs) = self.append.remove(&n.into()) { + // self.current_leafs.extend(leafs); + // } + // } + // _ => (), + // } + // } + // None; + + while let Some(ev) = self.preorder.next() { match ev { WalkEvent::Enter(SyntaxElement::Token(t)) => return Some(t), - WalkEvent::Enter(SyntaxElement::Node(n)) if censor.contains(&n) => { - preorder.skip_subtree() + WalkEvent::Enter(SyntaxElement::Node(n)) if self.remove.contains(&n) => { + self.preorder.skip_subtree(); + if let Some(mut v) = self.append.remove(&n.into()) { + v.reverse(); + self.current_leafs.extend(v); + return None; + } + } + WalkEvent::Enter(SyntaxElement::Node(_)) => (), + WalkEvent::Leave(ele) => { + if let Some(mut v) = self.append.remove(&ele) { + v.reverse(); + self.current_leafs.extend(v); + return None; + } } - _ => (), } } None @@ -626,45 +682,62 @@ impl Converter { } #[derive(Debug)] -enum SynToken { +enum SynToken { Ordinary(SyntaxToken), - Punct(SyntaxToken, usize), + Punct { token: SyntaxToken, offset: usize }, + Leaf(tt::Leaf), } -impl SynToken { +impl SynToken { fn token(&self) -> &SyntaxToken { match self { - SynToken::Ordinary(it) | SynToken::Punct(it, _) => it, + SynToken::Ordinary(it) | SynToken::Punct { token: it, offset: _ } => it, + SynToken::Leaf(_) => unreachable!(), } } } -impl SrcToken> for SynToken { - fn kind(&self, ctx: &Converter) -> SyntaxKind { +impl SrcToken, S> for SynToken { + fn kind(&self, ctx: &Converter) -> SyntaxKind { match self { SynToken::Ordinary(token) => token.kind(), - SynToken::Punct(..) => SyntaxKind::from_char(self.to_char(ctx).unwrap()).unwrap(), + SynToken::Punct { .. 
} => SyntaxKind::from_char(self.to_char(ctx).unwrap()).unwrap(), + SynToken::Leaf(_) => { + never!(); + SyntaxKind::ERROR + } } } - fn to_char(&self, _ctx: &Converter) -> Option { + fn to_char(&self, _ctx: &Converter) -> Option { match self { SynToken::Ordinary(_) => None, - SynToken::Punct(it, i) => it.text().chars().nth(*i), + SynToken::Punct { token: it, offset: i } => it.text().chars().nth(*i), + SynToken::Leaf(_) => None, + } + } + fn to_text(&self, _ctx: &Converter) -> SmolStr { + match self { + SynToken::Ordinary(token) | SynToken::Punct { token, offset: _ } => token.text().into(), + SynToken::Leaf(_) => { + never!(); + "".into() + } } } - fn to_text(&self, _ctx: &Converter) -> SmolStr { + fn as_leaf(&self) -> Option<&tt::Leaf> { match self { - SynToken::Ordinary(token) | SynToken::Punct(token, _) => token.text().into(), + SynToken::Ordinary(_) | SynToken::Punct { .. } => None, + SynToken::Leaf(it) => Some(it), } } } -impl TokenConverter for Converter +impl TokenConverter for Converter where S: Span, SpanMap: SpanMapper, { - type Token = SynToken; + type Token = SynToken; fn convert_doc_comment(&self, token: &Self::Token, span: S) -> Option>> { convert_doc_comment(token.token(), span) } @@ -676,20 +749,31 @@ where let range = punct.text_range(); self.punct_offset = Some((punct.clone(), offset)); let range = TextRange::at(range.start() + offset, TextSize::of('.')); - return Some((SynToken::Punct(punct, u32::from(offset) as usize), range)); + return Some(( + SynToken::Punct { token: punct, offset: u32::from(offset) as usize }, + range, + )); + } + } + + if let Some(leaf) = self.current_leafs.pop() { + if self.current_leafs.is_empty() { + self.current = self.next_token(); } + return Some((SynToken::Leaf(leaf), TextRange::empty(TextSize::new(0)))); } let curr = self.current.clone()?; if !self.range.contains_range(curr.text_range()) { return None; } - self.current = Self::next_token(&mut self.preorder, &self.censored); + + self.current = self.next_token(); let token = if curr.kind().is_punct() { self.punct_offset = Some((curr.clone(), 0.into())); let range = curr.text_range(); let range = TextRange::at(range.start(), TextSize::of('.')); - (SynToken::Punct(curr, 0 as usize), range) + (SynToken::Punct { token: curr, offset: 0 as usize }, range) } else { self.punct_offset = None; let range = curr.text_range(); @@ -703,7 +787,7 @@ where if let Some((punct, mut offset)) = self.punct_offset.clone() { offset += TextSize::of('.'); if usize::from(offset) < punct.text().len() { - return Some(SynToken::Punct(punct, usize::from(offset))); + return Some(SynToken::Punct { token: punct, offset: usize::from(offset) }); } } @@ -713,7 +797,7 @@ where } let token = if curr.kind().is_punct() { - SynToken::Punct(curr, 0 as usize) + SynToken::Punct { token: curr, offset: 0 as usize } } else { SynToken::Ordinary(curr) }; From 0003e568cacaa78d043cde29610182b931100109 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Fri, 1 Dec 2023 13:56:25 +0100 Subject: [PATCH 18/80] Pass calling span through to builtin macro expansions --- crates/hir-def/src/data.rs | 2 +- crates/hir-def/src/lib.rs | 8 +- .../hir-def/src/macro_expansion_tests/mbe.rs | 2 +- crates/hir-def/src/nameres/collector.rs | 28 +- crates/hir-expand/src/attrs.rs | 4 +- crates/hir-expand/src/builtin_attr_macro.rs | 30 +- crates/hir-expand/src/builtin_derive_macro.rs | 363 ++++++++++-------- crates/hir-expand/src/builtin_fn_macro.rs | 158 +++++--- crates/hir-expand/src/db.rs | 35 +- crates/hir-expand/src/eager.rs | 13 +- crates/hir-expand/src/lib.rs | 5 +- 
crates/hir-expand/src/proc_macro.rs | 18 +- crates/hir-expand/src/quote.rs | 150 ++++---- crates/load-cargo/src/lib.rs | 5 +- crates/mbe/src/expander.rs | 2 +- crates/mbe/src/expander/matcher.rs | 11 +- crates/mbe/src/expander/transcriber.rs | 22 +- crates/mbe/src/lib.rs | 6 +- crates/mbe/src/syntax_bridge.rs | 45 +-- crates/mbe/src/tt_iter.rs | 2 +- crates/proc-macro-api/src/msg.rs | 4 +- crates/proc-macro-api/src/msg/flat.rs | 10 - crates/proc-macro-srv/src/server.rs | 24 +- crates/proc-macro-srv/src/tests/mod.rs | 98 ++--- crates/proc-macro-srv/src/tests/utils.rs | 11 +- crates/tt/src/lib.rs | 61 ++- crates/vfs/src/lib.rs | 5 + 27 files changed, 624 insertions(+), 498 deletions(-) diff --git a/crates/hir-def/src/data.rs b/crates/hir-def/src/data.rs index 9986870d9dce6..2af4622a07243 100644 --- a/crates/hir-def/src/data.rs +++ b/crates/hir-def/src/data.rs @@ -663,7 +663,7 @@ impl<'a> AssocItemCollector<'a> { self.module_id.local_id, MacroCallKind::Attr { ast_id, - attr_args: Arc::new(tt::Subtree::empty()), + attr_args: None, invoc_attr_index: attr.id, }, attr.path().clone(), diff --git a/crates/hir-def/src/lib.rs b/crates/hir-def/src/lib.rs index f9374347c264d..35fb10e465e0f 100644 --- a/crates/hir-def/src/lib.rs +++ b/crates/hir-def/src/lib.rs @@ -1351,11 +1351,11 @@ fn attr_macro_as_call_id( let arg = match macro_attr.input.as_deref() { Some(AttrInput::TokenTree(tt)) => { let mut tt = tt.as_ref().clone(); - tt.delimiter = tt::Delimiter::UNSPECIFIED; - tt + tt.delimiter = tt::Delimiter::DUMMY_INVISIBLE; + Some(tt) } - _ => tt::Subtree::empty(), + _ => None, }; def.as_lazy_macro( @@ -1363,7 +1363,7 @@ fn attr_macro_as_call_id( krate, MacroCallKind::Attr { ast_id: item_attr.ast_id, - attr_args: Arc::new(arg), + attr_args: arg.map(Arc::new), invoc_attr_index: macro_attr.id, }, macro_attr.ctxt, diff --git a/crates/hir-def/src/macro_expansion_tests/mbe.rs b/crates/hir-def/src/macro_expansion_tests/mbe.rs index 39079685a4d92..9f0d3938b7ea0 100644 --- a/crates/hir-def/src/macro_expansion_tests/mbe.rs +++ b/crates/hir-def/src/macro_expansion_tests/mbe.rs @@ -138,7 +138,7 @@ macro_rules! 
identity { } fn main(foo: ()) { - builtin#FileId(0):0@0..0\0# ##FileId(0):0@0..0\0#format_args#FileId(0):0@0..0\0# (#FileId(0):3@56..57\0#"{} {} {}"#FileId(0):3@57..67\0#,#FileId(0):3@67..68\0# format_args#FileId(0):3@69..80\0#!#FileId(0):3@80..81\0#(#FileId(0):3@81..82\0#"{}"#FileId(0):3@82..86\0#,#FileId(0):3@86..87\0# 0#FileId(0):3@88..89\0#)#FileId(0):3@89..90\0#,#FileId(0):3@90..91\0# foo#FileId(0):3@92..95\0#,#FileId(0):3@95..96\0# identity#FileId(0):3@97..105\0#!#FileId(0):3@105..106\0#(#FileId(0):3@106..107\0#10#FileId(0):3@107..109\0#)#FileId(0):3@109..110\0#,#FileId(0):3@110..111\0# "bar"#FileId(0):3@112..117\0#)#FileId(0):3@117..118\0# + builtin#FileId(0):3@23..118\3# ##FileId(0):3@23..118\3#format_args#FileId(0):3@23..118\3# (#FileId(0):3@56..57\0#"{} {} {}"#FileId(0):3@57..67\0#,#FileId(0):3@67..68\0# format_args#FileId(0):3@69..80\0#!#FileId(0):3@80..81\0#(#FileId(0):3@81..82\0#"{}"#FileId(0):3@82..86\0#,#FileId(0):3@86..87\0# 0#FileId(0):3@88..89\0#)#FileId(0):3@89..90\0#,#FileId(0):3@90..91\0# foo#FileId(0):3@92..95\0#,#FileId(0):3@95..96\0# identity#FileId(0):3@97..105\0#!#FileId(0):3@105..106\0#(#FileId(0):3@106..107\0#10#FileId(0):3@107..109\0#)#FileId(0):3@109..110\0#,#FileId(0):3@110..111\0# "bar"#FileId(0):3@112..117\0#)#FileId(0):3@117..118\0# } "##]], diff --git a/crates/hir-def/src/nameres/collector.rs b/crates/hir-def/src/nameres/collector.rs index 599010e542ca1..b3a10a3869a43 100644 --- a/crates/hir-def/src/nameres/collector.rs +++ b/crates/hir-def/src/nameres/collector.rs @@ -5,7 +5,6 @@ use std::{cmp::Ordering, iter, mem}; -use ::tt::Span; use base_db::{span::SyntaxContextId, CrateId, Dependency, Edition, FileId}; use cfg::{CfgExpr, CfgOptions}; use either::Either; @@ -85,7 +84,17 @@ pub(super) fn collect_defs(db: &dyn DefDatabase, def_map: DefMap, tree_id: TreeI .enumerate() .map(|(idx, it)| { // FIXME: a hacky way to create a Name from string. - let name = tt::Ident { text: it.name.clone(), span: tt::SpanData::DUMMY }; + let name = tt::Ident { + text: it.name.clone(), + span: tt::SpanData { + range: syntax::TextRange::empty(syntax::TextSize::new(0)), + anchor: base_db::span::SpanAnchor { + file_id: FileId::BOGUS, + ast_id: base_db::span::ROOT_ERASED_FILE_AST_ID, + }, + ctx: SyntaxContextId::ROOT, + }, + }; (name.as_name(), ProcMacroExpander::new(base_db::ProcMacroId(idx as u32))) }) .collect()) @@ -476,7 +485,7 @@ impl DefCollector<'_> { directive.module_id, MacroCallKind::Attr { ast_id: ast_id.ast_id, - attr_args: Arc::new(tt::Subtree::empty()), + attr_args: None, invoc_attr_index: attr.id, }, attr.path().clone(), @@ -2079,7 +2088,18 @@ impl ModCollector<'_, '_> { let name = match attrs.by_key("rustc_builtin_macro").string_value() { Some(it) => { // FIXME: a hacky way to create a Name from string. 
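With `tt::SpanData::DUMMY` removed, each of these call sites now spells the placeholder span out by hand. Reassembled from the hunks above into a hypothetical helper (the patch itself inlines this literal at every site):

fn dummy_span() -> tt::SpanData {
    tt::SpanData {
        range: syntax::TextRange::empty(syntax::TextSize::new(0)),
        anchor: base_db::span::SpanAnchor {
            file_id: FileId::BOGUS,
            ast_id: base_db::span::ROOT_ERASED_FILE_AST_ID,
        },
        ctx: SyntaxContextId::ROOT,
    }
}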
- name = tt::Ident { text: it.clone(), span: tt::SpanData::DUMMY }.as_name(); + name = tt::Ident { + text: it.clone(), + span: tt::SpanData { + range: syntax::TextRange::empty(syntax::TextSize::new(0)), + anchor: base_db::span::SpanAnchor { + file_id: FileId::BOGUS, + ast_id: base_db::span::ROOT_ERASED_FILE_AST_ID, + }, + ctx: SyntaxContextId::ROOT, + }, + } + .as_name(); &name } None => { diff --git a/crates/hir-expand/src/attrs.rs b/crates/hir-expand/src/attrs.rs index 23a8fffa8c299..edaf2f06a4e86 100644 --- a/crates/hir-expand/src/attrs.rs +++ b/crates/hir-expand/src/attrs.rs @@ -130,7 +130,7 @@ impl RawAttrs { let attrs = parts.enumerate().take(1 << AttrId::CFG_ATTR_BITS).filter_map( |(idx, attr)| { let tree = Subtree { - delimiter: tt::Delimiter::unspecified(), + delimiter: tt::Delimiter::dummy_invisible(), token_trees: attr.to_vec(), }; Attr::from_tt(db, &tree, index.with_cfg_attr(idx)) @@ -296,7 +296,7 @@ impl Attr { // FIXME: This is necessarily a hack. It'd be nice if we could avoid allocation // here or maybe just parse a mod path from a token tree directly let subtree = tt::Subtree { - delimiter: tt::Delimiter::unspecified(), + delimiter: tt::Delimiter::dummy_invisible(), token_trees: tts.to_vec(), }; let (parse, span_map) = diff --git a/crates/hir-expand/src/builtin_attr_macro.rs b/crates/hir-expand/src/builtin_attr_macro.rs index c16b881df826d..de58a495fef4f 100644 --- a/crates/hir-expand/src/builtin_attr_macro.rs +++ b/crates/hir-expand/src/builtin_attr_macro.rs @@ -1,18 +1,22 @@ //! Builtin attributes. -use ::tt::Span; +use base_db::{ + span::{SyntaxContextId, ROOT_ERASED_FILE_AST_ID}, + FileId, +}; +use syntax::{TextRange, TextSize}; use crate::{db::ExpandDatabase, name, tt, ExpandResult, MacroCallId, MacroCallKind}; macro_rules! register_builtin { - ( $(($name:ident, $variant:ident) => $expand:ident),* ) => { + ($expand_fn:ident: $(($name:ident, $variant:ident) => $expand:ident),* ) => { #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] pub enum BuiltinAttrExpander { $($variant),* } impl BuiltinAttrExpander { - pub fn expand( + pub fn $expand_fn( &self, db: &dyn ExpandDatabase, id: MacroCallId, @@ -47,7 +51,7 @@ impl BuiltinAttrExpander { } } -register_builtin! { +register_builtin! { expand: (bench, Bench) => dummy_attr_expand, (cfg_accessible, CfgAccessible) => dummy_attr_expand, (cfg_eval, CfgEval) => dummy_attr_expand, @@ -99,21 +103,31 @@ fn derive_attr_expand( ) -> ExpandResult { let loc = db.lookup_intern_macro_call(id); let derives = match &loc.kind { - MacroCallKind::Attr { attr_args, .. } if loc.def.is_attribute_derive() => attr_args, - _ => return ExpandResult::ok(tt::Subtree::empty()), + MacroCallKind::Attr { attr_args: Some(attr_args), .. 
} if loc.def.is_attribute_derive() => { + attr_args + } + _ => return ExpandResult::ok(tt::Subtree::empty(tt::DelimSpan::DUMMY)), }; - pseudo_derive_attr_expansion(tt, derives) + pseudo_derive_attr_expansion(tt, derives, loc.call_site) } pub fn pseudo_derive_attr_expansion( tt: &tt::Subtree, args: &tt::Subtree, + call_site: SyntaxContextId, ) -> ExpandResult { let mk_leaf = |char| { tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct { char, spacing: tt::Spacing::Alone, - span: tt::SpanData::DUMMY, + span: tt::SpanData { + range: TextRange::empty(TextSize::new(0)), + anchor: base_db::span::SpanAnchor { + file_id: FileId::BOGUS, + ast_id: ROOT_ERASED_FILE_AST_ID, + }, + ctx: call_site, + }, })) }; diff --git a/crates/hir-expand/src/builtin_derive_macro.rs b/crates/hir-expand/src/builtin_derive_macro.rs index e9d137d990072..410aa4d289ebc 100644 --- a/crates/hir-expand/src/builtin_derive_macro.rs +++ b/crates/hir-expand/src/builtin_derive_macro.rs @@ -1,6 +1,5 @@ //! Builtin derives. -use ::tt::Span; use base_db::{span::SpanData, CrateOrigin, LangCrateOrigin}; use itertools::izip; use rustc_hash::FxHashSet; @@ -74,19 +73,19 @@ enum VariantShape { Unit, } -fn tuple_field_iterator(n: usize) -> impl Iterator { - (0..n).map(|it| tt::Ident::new(format!("f{it}"), tt::SpanData::DUMMY)) +fn tuple_field_iterator(span: SpanData, n: usize) -> impl Iterator { + (0..n).map(move |it| tt::Ident::new(format!("f{it}"), span)) } impl VariantShape { - fn as_pattern(&self, path: tt::Subtree) -> tt::Subtree { - self.as_pattern_map(path, |it| quote!(#it)) + fn as_pattern(&self, path: tt::Subtree, span: SpanData) -> tt::Subtree { + self.as_pattern_map(path, span, |it| quote!(span => #it)) } - fn field_names(&self) -> Vec { + fn field_names(&self, span: SpanData) -> Vec { match self { VariantShape::Struct(s) => s.clone(), - VariantShape::Tuple(n) => tuple_field_iterator(*n).collect(), + VariantShape::Tuple(n) => tuple_field_iterator(span, *n).collect(), VariantShape::Unit => vec![], } } @@ -94,26 +93,27 @@ impl VariantShape { fn as_pattern_map( &self, path: tt::Subtree, + span: SpanData, field_map: impl Fn(&tt::Ident) -> tt::Subtree, ) -> tt::Subtree { match self { VariantShape::Struct(fields) => { let fields = fields.iter().map(|it| { let mapped = field_map(it); - quote! { #it : #mapped , } + quote! {span => #it : #mapped , } }); - quote! { + quote! {span => #path { ##fields } } } &VariantShape::Tuple(n) => { - let fields = tuple_field_iterator(n).map(|it| { + let fields = tuple_field_iterator(span, n).map(|it| { let mapped = field_map(&it); - quote! { + quote! {span => #mapped , } }); - quote! { + quote! {span => #path ( ##fields ) } } @@ -143,17 +143,17 @@ enum AdtShape { } impl AdtShape { - fn as_pattern(&self, name: &tt::Ident) -> Vec { - self.as_pattern_map(name, |it| quote!(#it)) + fn as_pattern(&self, span: SpanData, name: &tt::Ident) -> Vec { + self.as_pattern_map(name, |it| quote!(span =>#it), span) } - fn field_names(&self) -> Vec> { + fn field_names(&self, span: SpanData) -> Vec> { match self { AdtShape::Struct(s) => { - vec![s.field_names()] + vec![s.field_names(span)] } AdtShape::Enum { variants, .. 
} => { - variants.iter().map(|(_, fields)| fields.field_names()).collect() + variants.iter().map(|(_, fields)| fields.field_names(span)).collect() } AdtShape::Union => { never!("using fields of union in derive is always wrong"); @@ -166,18 +166,21 @@ impl AdtShape { &self, name: &tt::Ident, field_map: impl Fn(&tt::Ident) -> tt::Subtree, + span: SpanData, ) -> Vec { match self { AdtShape::Struct(s) => { - vec![s.as_pattern_map(quote! { #name }, field_map)] + vec![s.as_pattern_map(quote! {span => #name }, span, field_map)] } AdtShape::Enum { variants, .. } => variants .iter() - .map(|(v, fields)| fields.as_pattern_map(quote! { #name :: #v }, &field_map)) + .map(|(v, fields)| { + fields.as_pattern_map(quote! {span => #name :: #v }, span, &field_map) + }) .collect(), AdtShape::Union => { never!("pattern matching on union is always wrong"); - vec![quote! { un }] + vec![quote! {span => un }] } } } @@ -193,7 +196,11 @@ struct BasicAdtInfo { associated_types: Vec, } -fn parse_adt(tm: SpanMapRef<'_>, adt: &ast::Adt) -> Result { +fn parse_adt( + tm: SpanMapRef<'_>, + adt: &ast::Adt, + call_site: SpanData, +) -> Result { let (name, generic_param_list, shape) = match adt { ast::Adt::Struct(it) => ( it.name(), @@ -240,7 +247,9 @@ fn parse_adt(tm: SpanMapRef<'_>, adt: &ast::Adt) -> Result tt::Subtree::empty(), + None => { + tt::Subtree::empty(::tt::DelimSpan { open: call_site, close: call_site }) + } } }; let bounds = match ¶m { @@ -253,7 +262,9 @@ fn parse_adt(tm: SpanMapRef<'_>, adt: &ast::Adt) -> Result, trait_path: tt::Subtree, make_trait_body: impl FnOnce(&BasicAdtInfo) -> tt::Subtree, ) -> ExpandResult { - let info = match parse_adt(tm, tt) { + let info = match parse_adt(tm, tt, invoc_span) { Ok(info) => info, - Err(e) => return ExpandResult::new(tt::Subtree::empty(), e), + Err(e) => { + return ExpandResult::new( + tt::Subtree::empty(tt::DelimSpan { open: invoc_span, close: invoc_span }), + e, + ) + } }; let trait_body = make_trait_body(&info); let mut where_block = vec![]; @@ -357,13 +373,13 @@ fn expand_simple_derive( let ident_ = ident.clone(); if let Some(b) = bound { let ident = ident.clone(); - where_block.push(quote! { #ident : #b , }); + where_block.push(quote! {invoc_span => #ident : #b , }); } if let Some(ty) = param_ty { - (quote! { const #ident : #ty , }, quote! { #ident_ , }) + (quote! {invoc_span => const #ident : #ty , }, quote! {invoc_span => #ident_ , }) } else { let bound = trait_path.clone(); - (quote! { #ident : #bound , }, quote! { #ident_ , }) + (quote! {invoc_span => #ident : #bound , }, quote! {invoc_span => #ident_ , }) } }) .unzip(); @@ -371,17 +387,17 @@ fn expand_simple_derive( where_block.extend(info.associated_types.iter().map(|it| { let it = it.clone(); let bound = trait_path.clone(); - quote! { #it : #bound , } + quote! {invoc_span => #it : #bound , } })); let name = info.name; - let expanded = quote! { + let expanded = quote! {invoc_span => impl < ##params > #trait_path for #name < ##args > where ##where_block { #trait_body } }; ExpandResult::ok(expanded) } -fn find_builtin_crate(db: &dyn ExpandDatabase, id: MacroCallId) -> tt::TokenTree { +fn find_builtin_crate(db: &dyn ExpandDatabase, id: MacroCallId, span: SpanData) -> tt::TokenTree { // FIXME: make hygiene works for builtin derive macro // such that $crate can be used here. 
let cg = db.crate_graph(); @@ -389,9 +405,9 @@ fn find_builtin_crate(db: &dyn ExpandDatabase, id: MacroCallId) -> tt::TokenTree let tt = if matches!(cg[krate].origin, CrateOrigin::Lang(LangCrateOrigin::Core)) { cov_mark::hit!(test_copy_expand_in_core); - quote! { crate } + quote! {span => crate } } else { - quote! { core } + quote! {span => core } }; tt.token_trees[0].clone() @@ -404,8 +420,8 @@ fn copy_expand( tt: &ast::Adt, tm: SpanMapRef<'_>, ) -> ExpandResult { - let krate = find_builtin_crate(db, id); - expand_simple_derive(span, tt, tm, quote! { #krate::marker::Copy }, |_| quote! {}) + let krate = find_builtin_crate(db, id, span); + expand_simple_derive(span, tt, tm, quote! {span => #krate::marker::Copy }, |_| quote! {span =>}) } fn clone_expand( @@ -415,37 +431,35 @@ fn clone_expand( tt: &ast::Adt, tm: SpanMapRef<'_>, ) -> ExpandResult { - let krate = find_builtin_crate(db, id); - expand_simple_derive(span, tt, tm, quote! { #krate::clone::Clone }, |adt| { + let krate = find_builtin_crate(db, id, span); + expand_simple_derive(span, tt, tm, quote! {span => #krate::clone::Clone }, |adt| { if matches!(adt.shape, AdtShape::Union) { - let star = - tt::Punct { char: '*', spacing: ::tt::Spacing::Alone, span: tt::SpanData::DUMMY }; - return quote! { + let star = tt::Punct { char: '*', spacing: ::tt::Spacing::Alone, span }; + return quote! {span => fn clone(&self) -> Self { #star self } }; } if matches!(&adt.shape, AdtShape::Enum { variants, .. } if variants.is_empty()) { - let star = - tt::Punct { char: '*', spacing: ::tt::Spacing::Alone, span: tt::SpanData::DUMMY }; - return quote! { + let star = tt::Punct { char: '*', spacing: ::tt::Spacing::Alone, span }; + return quote! {span => fn clone(&self) -> Self { match #star self {} } }; } let name = &adt.name; - let patterns = adt.shape.as_pattern(name); - let exprs = adt.shape.as_pattern_map(name, |it| quote! { #it .clone() }); + let patterns = adt.shape.as_pattern(span, name); + let exprs = adt.shape.as_pattern_map(name, |it| quote! {span => #it .clone() }, span); let arms = patterns.into_iter().zip(exprs.into_iter()).map(|(pat, expr)| { - let fat_arrow = fat_arrow(); - quote! { + let fat_arrow = fat_arrow(span); + quote! {span => #pat #fat_arrow #expr, } }); - quote! { + quote! {span => fn clone(&self) -> Self { match self { ##arms @@ -455,16 +469,16 @@ fn clone_expand( }) } -/// This function exists since `quote! { => }` doesn't work. -fn fat_arrow() -> tt::Subtree { - let eq = tt::Punct { char: '=', spacing: ::tt::Spacing::Joint, span: tt::SpanData::DUMMY }; - quote! { #eq> } +/// This function exists since `quote! {span => => }` doesn't work. +fn fat_arrow(span: SpanData) -> tt::Subtree { + let eq = tt::Punct { char: '=', spacing: ::tt::Spacing::Joint, span }; + quote! {span => #eq> } } -/// This function exists since `quote! { && }` doesn't work. -fn and_and() -> tt::Subtree { - let and = tt::Punct { char: '&', spacing: ::tt::Spacing::Joint, span: tt::SpanData::DUMMY }; - quote! { #and& } +/// This function exists since `quote! {span => && }` doesn't work. +fn and_and(span: SpanData) -> tt::Subtree { + let and = tt::Punct { char: '&', spacing: ::tt::Spacing::Joint, span }; + quote! {span => #and& } } fn default_expand( @@ -474,33 +488,37 @@ fn default_expand( tt: &ast::Adt, tm: SpanMapRef<'_>, ) -> ExpandResult { - let krate = &find_builtin_crate(db, id); - expand_simple_derive(span, tt, tm, quote! { #krate::default::Default }, |adt| { + let krate = &find_builtin_crate(db, id, span); + expand_simple_derive(span, tt, tm, quote! 
{span => #krate::default::Default }, |adt| { let body = match &adt.shape { AdtShape::Struct(fields) => { let name = &adt.name; - fields - .as_pattern_map(quote!(#name), |_| quote!(#krate::default::Default::default())) + fields.as_pattern_map( + quote!(span =>#name), + span, + |_| quote!(span =>#krate::default::Default::default()), + ) } AdtShape::Enum { default_variant, variants } => { if let Some(d) = default_variant { let (name, fields) = &variants[*d]; let adt_name = &adt.name; fields.as_pattern_map( - quote!(#adt_name :: #name), - |_| quote!(#krate::default::Default::default()), + quote!(span =>#adt_name :: #name), + span, + |_| quote!(span =>#krate::default::Default::default()), ) } else { // FIXME: Return expand error here - quote!() + quote!(span =>) } } AdtShape::Union => { // FIXME: Return expand error here - quote!() + quote!(span =>) } }; - quote! { + quote! {span => fn default() -> Self { #body } @@ -515,38 +533,37 @@ fn debug_expand( tt: &ast::Adt, tm: SpanMapRef<'_>, ) -> ExpandResult { - let krate = &find_builtin_crate(db, id); - expand_simple_derive(span, tt, tm, quote! { #krate::fmt::Debug }, |adt| { + let krate = &find_builtin_crate(db, id, span); + expand_simple_derive(span, tt, tm, quote! {span => #krate::fmt::Debug }, |adt| { let for_variant = |name: String, v: &VariantShape| match v { VariantShape::Struct(fields) => { let for_fields = fields.iter().map(|it| { let x_string = it.to_string(); - quote! { + quote! {span => .field(#x_string, & #it) } }); - quote! { + quote! {span => f.debug_struct(#name) ##for_fields .finish() } } VariantShape::Tuple(n) => { - let for_fields = tuple_field_iterator(*n).map(|it| { - quote! { + let for_fields = tuple_field_iterator(span, *n).map(|it| { + quote! {span => .field( & #it) } }); - quote! { + quote! {span => f.debug_tuple(#name) ##for_fields .finish() } } - VariantShape::Unit => quote! { + VariantShape::Unit => quote! {span => f.write_str(#name) }, }; if matches!(&adt.shape, AdtShape::Enum { variants, .. } if variants.is_empty()) { - let star = - tt::Punct { char: '*', spacing: ::tt::Spacing::Alone, span: tt::SpanData::DUMMY }; - return quote! { + let star = tt::Punct { char: '*', spacing: ::tt::Spacing::Alone, span }; + return quote! {span => fn fmt(&self, f: &mut #krate::fmt::Formatter) -> #krate::fmt::Result { match #star self {} } @@ -554,20 +571,20 @@ fn debug_expand( } let arms = match &adt.shape { AdtShape::Struct(fields) => { - let fat_arrow = fat_arrow(); + let fat_arrow = fat_arrow(span); let name = &adt.name; - let pat = fields.as_pattern(quote!(#name)); + let pat = fields.as_pattern(quote!(span =>#name), span); let expr = for_variant(name.to_string(), fields); - vec![quote! { #pat #fat_arrow #expr }] + vec![quote! {span => #pat #fat_arrow #expr }] } AdtShape::Enum { variants, .. } => variants .iter() .map(|(name, v)| { - let fat_arrow = fat_arrow(); + let fat_arrow = fat_arrow(span); let adt_name = &adt.name; - let pat = v.as_pattern(quote!(#adt_name :: #name)); + let pat = v.as_pattern(quote!(span =>#adt_name :: #name), span); let expr = for_variant(name.to_string(), v); - quote! { + quote! {span => #pat #fat_arrow #expr , } }) @@ -577,7 +594,7 @@ fn debug_expand( vec![] } }; - quote! { + quote! {span => fn fmt(&self, f: &mut #krate::fmt::Formatter) -> #krate::fmt::Result { match self { ##arms @@ -594,41 +611,42 @@ fn hash_expand( tt: &ast::Adt, tm: SpanMapRef<'_>, ) -> ExpandResult { - let krate = &find_builtin_crate(db, id); - expand_simple_derive(span, tt, tm, quote! 
{ #krate::hash::Hash }, |adt| { + let krate = &find_builtin_crate(db, id, span); + expand_simple_derive(span, tt, tm, quote! {span => #krate::hash::Hash }, |adt| { if matches!(adt.shape, AdtShape::Union) { // FIXME: Return expand error here - return quote! {}; + return quote! {span =>}; } if matches!(&adt.shape, AdtShape::Enum { variants, .. } if variants.is_empty()) { - let star = - tt::Punct { char: '*', spacing: ::tt::Spacing::Alone, span: tt::SpanData::DUMMY }; - return quote! { + let star = tt::Punct { char: '*', spacing: ::tt::Spacing::Alone, span }; + return quote! {span => fn hash(&self, ra_expand_state: &mut H) { match #star self {} } }; } - let arms = adt.shape.as_pattern(&adt.name).into_iter().zip(adt.shape.field_names()).map( - |(pat, names)| { - let expr = { - let it = names.iter().map(|it| quote! { #it . hash(ra_expand_state); }); - quote! { { - ##it - } } - }; - let fat_arrow = fat_arrow(); - quote! { - #pat #fat_arrow #expr , - } - }, - ); + let arms = + adt.shape.as_pattern(span, &adt.name).into_iter().zip(adt.shape.field_names(span)).map( + |(pat, names)| { + let expr = { + let it = + names.iter().map(|it| quote! {span => #it . hash(ra_expand_state); }); + quote! {span => { + ##it + } } + }; + let fat_arrow = fat_arrow(span); + quote! {span => + #pat #fat_arrow #expr , + } + }, + ); let check_discriminant = if matches!(&adt.shape, AdtShape::Enum { .. }) { - quote! { #krate::mem::discriminant(self).hash(ra_expand_state); } + quote! {span => #krate::mem::discriminant(self).hash(ra_expand_state); } } else { - quote! {} + quote! {span =>} }; - quote! { + quote! {span => fn hash(&self, ra_expand_state: &mut H) { #check_discriminant match self { @@ -646,8 +664,8 @@ fn eq_expand( tt: &ast::Adt, tm: SpanMapRef<'_>, ) -> ExpandResult { - let krate = find_builtin_crate(db, id); - expand_simple_derive(span, tt, tm, quote! { #krate::cmp::Eq }, |_| quote! {}) + let krate = find_builtin_crate(db, id, span); + expand_simple_derive(span, tt, tm, quote! {span => #krate::cmp::Eq }, |_| quote! {span =>}) } fn partial_eq_expand( @@ -657,43 +675,43 @@ fn partial_eq_expand( tt: &ast::Adt, tm: SpanMapRef<'_>, ) -> ExpandResult { - let krate = find_builtin_crate(db, id); - expand_simple_derive(span, tt, tm, quote! { #krate::cmp::PartialEq }, |adt| { + let krate = find_builtin_crate(db, id, span); + expand_simple_derive(span, tt, tm, quote! {span => #krate::cmp::PartialEq }, |adt| { if matches!(adt.shape, AdtShape::Union) { // FIXME: Return expand error here - return quote! {}; + return quote! {span =>}; } let name = &adt.name; - let (self_patterns, other_patterns) = self_and_other_patterns(adt, name); - let arms = izip!(self_patterns, other_patterns, adt.shape.field_names()).map( + let (self_patterns, other_patterns) = self_and_other_patterns(adt, name, span); + let arms = izip!(self_patterns, other_patterns, adt.shape.field_names(span)).map( |(pat1, pat2, names)| { - let fat_arrow = fat_arrow(); + let fat_arrow = fat_arrow(span); let body = match &*names { [] => { - quote!(true) + quote!(span =>true) } [first, rest @ ..] 
=> { let rest = rest.iter().map(|it| { let t1 = tt::Ident::new(format!("{}_self", it.text), it.span); let t2 = tt::Ident::new(format!("{}_other", it.text), it.span); - let and_and = and_and(); - quote!(#and_and #t1 .eq( #t2 )) + let and_and = and_and(span); + quote!(span =>#and_and #t1 .eq( #t2 )) }); let first = { let t1 = tt::Ident::new(format!("{}_self", first.text), first.span); let t2 = tt::Ident::new(format!("{}_other", first.text), first.span); - quote!(#t1 .eq( #t2 )) + quote!(span =>#t1 .eq( #t2 )) }; - quote!(#first ##rest) + quote!(span =>#first ##rest) } }; - quote! { ( #pat1 , #pat2 ) #fat_arrow #body , } + quote! {span => ( #pat1 , #pat2 ) #fat_arrow #body , } }, ); - let fat_arrow = fat_arrow(); - quote! { + let fat_arrow = fat_arrow(span); + quote! {span => fn eq(&self, other: &Self) -> bool { match (self, other) { ##arms @@ -707,15 +725,24 @@ fn partial_eq_expand( fn self_and_other_patterns( adt: &BasicAdtInfo, name: &tt::Ident, + span: SpanData, ) -> (Vec, Vec) { - let self_patterns = adt.shape.as_pattern_map(name, |it| { - let t = tt::Ident::new(format!("{}_self", it.text), it.span); - quote!(#t) - }); - let other_patterns = adt.shape.as_pattern_map(name, |it| { - let t = tt::Ident::new(format!("{}_other", it.text), it.span); - quote!(#t) - }); + let self_patterns = adt.shape.as_pattern_map( + name, + |it| { + let t = tt::Ident::new(format!("{}_self", it.text), it.span); + quote!(span =>#t) + }, + span, + ); + let other_patterns = adt.shape.as_pattern_map( + name, + |it| { + let t = tt::Ident::new(format!("{}_other", it.text), it.span); + quote!(span =>#t) + }, + span, + ); (self_patterns, other_patterns) } @@ -726,17 +753,18 @@ fn ord_expand( tt: &ast::Adt, tm: SpanMapRef<'_>, ) -> ExpandResult { - let krate = &find_builtin_crate(db, id); - expand_simple_derive(span, tt, tm, quote! { #krate::cmp::Ord }, |adt| { + let krate = &find_builtin_crate(db, id, span); + expand_simple_derive(span, tt, tm, quote! {span => #krate::cmp::Ord }, |adt| { fn compare( krate: &tt::TokenTree, left: tt::Subtree, right: tt::Subtree, rest: tt::Subtree, + span: SpanData, ) -> tt::Subtree { - let fat_arrow1 = fat_arrow(); - let fat_arrow2 = fat_arrow(); - quote! { + let fat_arrow1 = fat_arrow(span); + let fat_arrow2 = fat_arrow(span); + quote! {span => match #left.cmp(&#right) { #krate::cmp::Ordering::Equal #fat_arrow1 { #rest @@ -747,34 +775,34 @@ fn ord_expand( } if matches!(adt.shape, AdtShape::Union) { // FIXME: Return expand error here - return quote!(); + return quote!(span =>); } - let (self_patterns, other_patterns) = self_and_other_patterns(adt, &adt.name); - let arms = izip!(self_patterns, other_patterns, adt.shape.field_names()).map( + let (self_patterns, other_patterns) = self_and_other_patterns(adt, &adt.name, span); + let arms = izip!(self_patterns, other_patterns, adt.shape.field_names(span)).map( |(pat1, pat2, fields)| { - let mut body = quote!(#krate::cmp::Ordering::Equal); + let mut body = quote!(span =>#krate::cmp::Ordering::Equal); for f in fields.into_iter().rev() { let t1 = tt::Ident::new(format!("{}_self", f.text), f.span); let t2 = tt::Ident::new(format!("{}_other", f.text), f.span); - body = compare(krate, quote!(#t1), quote!(#t2), body); + body = compare(krate, quote!(span =>#t1), quote!(span =>#t2), body, span); } - let fat_arrow = fat_arrow(); - quote! { ( #pat1 , #pat2 ) #fat_arrow #body , } + let fat_arrow = fat_arrow(span); + quote! {span => ( #pat1 , #pat2 ) #fat_arrow #body , } }, ); - let fat_arrow = fat_arrow(); - let mut body = quote! 
{ + let fat_arrow = fat_arrow(span); + let mut body = quote! {span => match (self, other) { ##arms _unused #fat_arrow #krate::cmp::Ordering::Equal } }; if matches!(&adt.shape, AdtShape::Enum { .. }) { - let left = quote!(#krate::intrinsics::discriminant_value(self)); - let right = quote!(#krate::intrinsics::discriminant_value(other)); - body = compare(krate, left, right, body); + let left = quote!(span =>#krate::intrinsics::discriminant_value(self)); + let right = quote!(span =>#krate::intrinsics::discriminant_value(other)); + body = compare(krate, left, right, body, span); } - quote! { + quote! {span => fn cmp(&self, other: &Self) -> #krate::cmp::Ordering { #body } @@ -789,17 +817,18 @@ fn partial_ord_expand( tt: &ast::Adt, tm: SpanMapRef<'_>, ) -> ExpandResult { - let krate = &find_builtin_crate(db, id); - expand_simple_derive(span, tt, tm, quote! { #krate::cmp::PartialOrd }, |adt| { + let krate = &find_builtin_crate(db, id, span); + expand_simple_derive(span, tt, tm, quote! {span => #krate::cmp::PartialOrd }, |adt| { fn compare( krate: &tt::TokenTree, left: tt::Subtree, right: tt::Subtree, rest: tt::Subtree, + span: SpanData, ) -> tt::Subtree { - let fat_arrow1 = fat_arrow(); - let fat_arrow2 = fat_arrow(); - quote! { + let fat_arrow1 = fat_arrow(span); + let fat_arrow2 = fat_arrow(span); + quote! {span => match #left.partial_cmp(&#right) { #krate::option::Option::Some(#krate::cmp::Ordering::Equal) #fat_arrow1 { #rest @@ -810,37 +839,39 @@ fn partial_ord_expand( } if matches!(adt.shape, AdtShape::Union) { // FIXME: Return expand error here - return quote!(); + return quote!(span =>); } - let left = quote!(#krate::intrinsics::discriminant_value(self)); - let right = quote!(#krate::intrinsics::discriminant_value(other)); + let left = quote!(span =>#krate::intrinsics::discriminant_value(self)); + let right = quote!(span =>#krate::intrinsics::discriminant_value(other)); - let (self_patterns, other_patterns) = self_and_other_patterns(adt, &adt.name); - let arms = izip!(self_patterns, other_patterns, adt.shape.field_names()).map( + let (self_patterns, other_patterns) = self_and_other_patterns(adt, &adt.name, span); + let arms = izip!(self_patterns, other_patterns, adt.shape.field_names(span)).map( |(pat1, pat2, fields)| { - let mut body = quote!(#krate::option::Option::Some(#krate::cmp::Ordering::Equal)); + let mut body = + quote!(span =>#krate::option::Option::Some(#krate::cmp::Ordering::Equal)); for f in fields.into_iter().rev() { let t1 = tt::Ident::new(format!("{}_self", f.text), f.span); let t2 = tt::Ident::new(format!("{}_other", f.text), f.span); - body = compare(krate, quote!(#t1), quote!(#t2), body); + body = compare(krate, quote!(span =>#t1), quote!(span =>#t2), body, span); } - let fat_arrow = fat_arrow(); - quote! { ( #pat1 , #pat2 ) #fat_arrow #body , } + let fat_arrow = fat_arrow(span); + quote! {span => ( #pat1 , #pat2 ) #fat_arrow #body , } }, ); - let fat_arrow = fat_arrow(); + let fat_arrow = fat_arrow(span); let body = compare( krate, left, right, - quote! { + quote! {span => match (self, other) { ##arms _unused #fat_arrow #krate::option::Option::Some(#krate::cmp::Ordering::Equal) } }, + span, ); - quote! { + quote! 
{span => fn partial_cmp(&self, other: &Self) -> #krate::option::Option::Option<#krate::cmp::Ordering> { #body } diff --git a/crates/hir-expand/src/builtin_fn_macro.rs b/crates/hir-expand/src/builtin_fn_macro.rs index 459d1c868d808..726b9835369fa 100644 --- a/crates/hir-expand/src/builtin_fn_macro.rs +++ b/crates/hir-expand/src/builtin_fn_macro.rs @@ -1,8 +1,7 @@ //! Builtin macro -use ::tt::Span; use base_db::{ - span::{SpanAnchor, ROOT_ERASED_FILE_AST_ID}, + span::{SpanAnchor, SpanData, SyntaxContextId, ROOT_ERASED_FILE_AST_ID}, AnchoredPath, Edition, FileId, }; use cfg::CfgExpr; @@ -15,8 +14,9 @@ use syntax::{ use crate::{ db::ExpandDatabase, + hygiene::span_with_def_site_ctxt, name, quote, - tt::{self}, + tt::{self, DelimSpan}, EagerCallInfo, ExpandError, ExpandResult, HirFileIdExt, MacroCallId, MacroCallLoc, }; @@ -42,7 +42,10 @@ macro_rules! register_builtin { let expander = match *self { $( BuiltinFnLikeExpander::$kind => $expand, )* }; - expander(db, id, tt) + + let span = db.lookup_intern_macro_call(id).span(db); + let span = span_with_def_site_ctxt(db, span, id); + expander(db, id, tt, span) } } @@ -50,13 +53,16 @@ macro_rules! register_builtin { pub fn expand( &self, db: &dyn ExpandDatabase, - arg_id: MacroCallId, + id: MacroCallId, tt: &tt::Subtree, ) -> ExpandResult { let expander = match *self { $( EagerExpander::$e_kind => $e_expand, )* }; - expander(db, arg_id, tt) + + let span = db.lookup_intern_macro_call(id).span(db); + let span = span_with_def_site_ctxt(db, span, id); + expander(db, id, tt, span) } } @@ -115,29 +121,42 @@ register_builtin! { (option_env, OptionEnv) => option_env_expand } -const DOLLAR_CRATE: tt::Ident = - tt::Ident { text: SmolStr::new_inline("$crate"), span: tt::SpanData::DUMMY }; +fn mk_pound(span: SpanData) -> tt::Subtree { + crate::quote::IntoTt::to_subtree( + vec![crate::tt::Leaf::Punct(crate::tt::Punct { + char: '#', + spacing: crate::tt::Spacing::Alone, + span: span, + }) + .into()], + span, + ) +} fn module_path_expand( _db: &dyn ExpandDatabase, _id: MacroCallId, _tt: &tt::Subtree, + span: SpanData, ) -> ExpandResult { // Just return a dummy result. - ExpandResult::ok(quote! { "module::path" }) + ExpandResult::ok(quote! {span => + "module::path" + }) } fn line_expand( _db: &dyn ExpandDatabase, _id: MacroCallId, _tt: &tt::Subtree, + span: SpanData, ) -> ExpandResult { // dummy implementation for type-checking purposes ExpandResult::ok(tt::Subtree { - delimiter: tt::Delimiter::unspecified(), + delimiter: tt::Delimiter::dummy_invisible(), token_trees: vec![tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal { text: "0u32".into(), - span: tt::SpanData::DUMMY, + span, }))], }) } @@ -146,26 +165,29 @@ fn log_syntax_expand( _db: &dyn ExpandDatabase, _id: MacroCallId, _tt: &tt::Subtree, + span: SpanData, ) -> ExpandResult { - ExpandResult::ok(quote! {}) + ExpandResult::ok(quote! {span =>}) } fn trace_macros_expand( _db: &dyn ExpandDatabase, _id: MacroCallId, _tt: &tt::Subtree, + span: SpanData, ) -> ExpandResult { - ExpandResult::ok(quote! {}) + ExpandResult::ok(quote! {span =>}) } fn stringify_expand( _db: &dyn ExpandDatabase, _id: MacroCallId, tt: &tt::Subtree, + span: SpanData, ) -> ExpandResult { let pretty = ::tt::pretty(&tt.token_trees); - let expanded = quote! { + let expanded = quote! 
{span => #pretty }; @@ -176,27 +198,29 @@ fn assert_expand( _db: &dyn ExpandDatabase, _id: MacroCallId, tt: &tt::Subtree, + span: SpanData, ) -> ExpandResult { let args = parse_exprs_with_sep(tt, ','); + let dollar_crate = tt::Ident { text: SmolStr::new_inline("$crate"), span }; let expanded = match &*args { [cond, panic_args @ ..] => { let comma = tt::Subtree { - delimiter: tt::Delimiter::unspecified(), + delimiter: tt::Delimiter::dummy_invisible(), token_trees: vec![tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct { char: ',', spacing: tt::Spacing::Alone, - span: tt::SpanData::DUMMY, + span, }))], }; let cond = cond.clone(); let panic_args = itertools::Itertools::intersperse(panic_args.iter().cloned(), comma); - quote! {{ + quote! {span =>{ if !(#cond) { - #DOLLAR_CRATE::panic!(##panic_args); + #dollar_crate::panic!(##panic_args); } }} } - [] => quote! {{}}, + [] => quote! {span =>{}}, }; ExpandResult::ok(expanded) @@ -206,12 +230,13 @@ fn file_expand( _db: &dyn ExpandDatabase, _id: MacroCallId, _tt: &tt::Subtree, + span: SpanData, ) -> ExpandResult { // FIXME: RA purposefully lacks knowledge of absolute file names // so just return "". let file_name = ""; - let expanded = quote! { + let expanded = quote! {span => #file_name }; @@ -222,16 +247,18 @@ fn format_args_expand( db: &dyn ExpandDatabase, id: MacroCallId, tt: &tt::Subtree, + span: SpanData, ) -> ExpandResult { - format_args_expand_general(db, id, tt, "") + format_args_expand_general(db, id, tt, "", span) } fn format_args_nl_expand( db: &dyn ExpandDatabase, id: MacroCallId, tt: &tt::Subtree, + span: SpanData, ) -> ExpandResult { - format_args_expand_general(db, id, tt, "\\n") + format_args_expand_general(db, id, tt, "\\n", span) } fn format_args_expand_general( @@ -240,11 +267,12 @@ fn format_args_expand_general( tt: &tt::Subtree, // FIXME: Make use of this so that mir interpretation works properly _end_string: &str, + span: SpanData, ) -> ExpandResult { - let pound = quote! {@PUNCT '#'}; + let pound = mk_pound(span); let mut tt = tt.clone(); tt.delimiter.kind = tt::DelimiterKind::Parenthesis; - return ExpandResult::ok(quote! { + return ExpandResult::ok(quote! {span => builtin #pound format_args #tt }); } @@ -253,25 +281,25 @@ fn asm_expand( _db: &dyn ExpandDatabase, _id: MacroCallId, tt: &tt::Subtree, + span: SpanData, ) -> ExpandResult { // We expand all assembly snippets to `format_args!` invocations to get format syntax // highlighting for them. - let mut literals = Vec::new(); for tt in tt.token_trees.chunks(2) { match tt { [tt::TokenTree::Leaf(tt::Leaf::Literal(lit))] | [tt::TokenTree::Leaf(tt::Leaf::Literal(lit)), tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct { char: ',', span: _, spacing: _ }))] => { - let krate = DOLLAR_CRATE.clone(); - literals.push(quote!(#krate::format_args!(#lit);)); + let dollar_krate = tt::Ident { text: SmolStr::new_inline("$crate"), span }; + literals.push(quote!(span=>#dollar_krate::format_args!(#lit);)); } _ => break, } } - let pound = quote! {@PUNCT '#'}; - let expanded = quote! { + let pound = mk_pound(span); + let expanded = quote! {span => builtin #pound asm ( {##literals} ) @@ -283,20 +311,22 @@ fn global_asm_expand( _db: &dyn ExpandDatabase, _id: MacroCallId, _tt: &tt::Subtree, + span: SpanData, ) -> ExpandResult { // Expand to nothing (at item-level) - ExpandResult::ok(quote! {}) + ExpandResult::ok(quote! 
{span =>}) } fn cfg_expand( db: &dyn ExpandDatabase, id: MacroCallId, tt: &tt::Subtree, + span: SpanData, ) -> ExpandResult { let loc = db.lookup_intern_macro_call(id); let expr = CfgExpr::parse(tt); let enabled = db.crate_graph()[loc.krate].cfg_options.check(&expr) != Some(false); - let expanded = if enabled { quote!(true) } else { quote!(false) }; + let expanded = if enabled { quote!(span=>true) } else { quote!(span=>false) }; ExpandResult::ok(expanded) } @@ -304,13 +334,15 @@ fn panic_expand( db: &dyn ExpandDatabase, id: MacroCallId, tt: &tt::Subtree, + span: SpanData, ) -> ExpandResult { let loc: MacroCallLoc = db.lookup_intern_macro_call(id); + let dollar_crate = tt::Ident { text: SmolStr::new_inline("$crate"), span }; // Expand to a macro call `$crate::panic::panic_{edition}` let mut call = if db.crate_graph()[loc.krate].edition >= Edition::Edition2021 { - quote!(#DOLLAR_CRATE::panic::panic_2021!) + quote!(span =>#dollar_crate::panic::panic_2021!) } else { - quote!(#DOLLAR_CRATE::panic::panic_2015!) + quote!(span =>#dollar_crate::panic::panic_2015!) }; // Pass the original arguments @@ -322,13 +354,15 @@ fn unreachable_expand( db: &dyn ExpandDatabase, id: MacroCallId, tt: &tt::Subtree, + span: SpanData, ) -> ExpandResult { let loc: MacroCallLoc = db.lookup_intern_macro_call(id); // Expand to a macro call `$crate::panic::unreachable_{edition}` + let dollar_crate = tt::Ident { text: SmolStr::new_inline("$crate"), span }; let mut call = if db.crate_graph()[loc.krate].edition >= Edition::Edition2021 { - quote!(#DOLLAR_CRATE::panic::unreachable_2021!) + quote!(span =>#dollar_crate::panic::unreachable_2021!) } else { - quote!(#DOLLAR_CRATE::panic::unreachable_2015!) + quote!(span =>#dollar_crate::panic::unreachable_2015!) }; // Pass the original arguments @@ -358,6 +392,7 @@ fn compile_error_expand( _db: &dyn ExpandDatabase, _id: MacroCallId, tt: &tt::Subtree, + span: SpanData, ) -> ExpandResult { let err = match &*tt.token_trees { [tt::TokenTree::Leaf(tt::Leaf::Literal(it))] => match unquote_str(it) { @@ -367,13 +402,14 @@ fn compile_error_expand( _ => ExpandError::other("`compile_error!` argument must be a string"), }; - ExpandResult { value: quote! {}, err: Some(err) } + ExpandResult { value: quote! 
{span =>}, err: Some(err) } } fn concat_expand( _db: &dyn ExpandDatabase, _arg_id: MacroCallId, tt: &tt::Subtree, + span: SpanData, ) -> ExpandResult { let mut err = None; let mut text = String::new(); @@ -413,13 +449,14 @@ fn concat_expand( } } } - ExpandResult { value: quote!(#text), err } + ExpandResult { value: quote!(span =>#text), err } } fn concat_bytes_expand( _db: &dyn ExpandDatabase, _arg_id: MacroCallId, tt: &tt::Subtree, + span: SpanData, ) -> ExpandResult { let mut bytes = Vec::new(); let mut err = None; @@ -452,8 +489,8 @@ fn concat_bytes_expand( } } } - let ident = tt::Ident { text: bytes.join(", ").into(), span: tt::SpanData::DUMMY }; - ExpandResult { value: quote!([#ident]), err } + let ident = tt::Ident { text: bytes.join(", ").into(), span }; + ExpandResult { value: quote!(span =>[#ident]), err } } fn concat_bytes_expand_subtree( @@ -486,6 +523,7 @@ fn concat_idents_expand( _db: &dyn ExpandDatabase, _arg_id: MacroCallId, tt: &tt::Subtree, + span: SpanData, ) -> ExpandResult { let mut err = None; let mut ident = String::new(); @@ -500,8 +538,9 @@ fn concat_idents_expand( } } } - let ident = tt::Ident { text: ident.into(), span: tt::SpanData::DUMMY }; - ExpandResult { value: quote!(#ident), err } + // FIXME merge spans + let ident = tt::Ident { text: ident.into(), span }; + ExpandResult { value: quote!(span =>#ident), err } } fn relative_file( @@ -537,10 +576,11 @@ fn include_expand( db: &dyn ExpandDatabase, arg_id: MacroCallId, _tt: &tt::Subtree, + span: SpanData, ) -> ExpandResult { match db.include_expand(arg_id) { Ok((res, _)) => ExpandResult::ok(res.as_ref().clone()), - Err(e) => ExpandResult::new(tt::Subtree::empty(), e), + Err(e) => ExpandResult::new(tt::Subtree::empty(DelimSpan { open: span, close: span }), e), } } @@ -559,6 +599,8 @@ pub(crate) fn include_arg_to_tt( // why are we not going through a SyntaxNode here? 
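Taking stock of the pattern that runs through all of these hunks: `quote!` now takes the span to stamp onto generated tokens as an explicit leading argument, written `quote! {span => ...}` or `quote!(span => ...)`, instead of baking `DUMMY` spans into every leaf. The toy macro below illustrates only that calling convention; `Span`, `Ident`, and `toy_quote!` are simplified stand-ins for this sketch, not the real `tt`/`hir-expand` definitions.

#[derive(Debug, Clone, Copy)]
struct Span(u32);

#[derive(Debug)]
struct Ident { text: String, span: Span }

// The first argument names the span variable; every token built by the
// expansion carries it.
macro_rules! toy_quote {
    ($span:ident => $($tt:ident)*) => {
        vec![$(Ident { text: stringify!($tt).to_owned(), span: $span }),*]
    };
}

fn main() {
    let call_site = Span(7);
    let tokens = toy_quote!(call_site => fn default);
    // Both tokens report Span(7); nothing falls back to a DUMMY span.
    assert_eq!(tokens.len(), 2);
    println!("{tokens:?}");
}

The payoff is that expansions stop leaking placeholder spans into the tree: the caller of the expander decides what location the produced tokens should report.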
let subtree = parse_to_token_tree( SpanAnchor { file_id, ast_id: ROOT_ERASED_FILE_AST_ID }, + // FIXME + SyntaxContextId::ROOT, &db.file_text(file_id), ) .ok_or(mbe::ExpandError::ConversionError)?; @@ -569,17 +611,18 @@ fn include_bytes_expand( _db: &dyn ExpandDatabase, _arg_id: MacroCallId, tt: &tt::Subtree, + span: SpanData, ) -> ExpandResult { if let Err(e) = parse_string(tt) { - return ExpandResult::new(tt::Subtree::empty(), e); + return ExpandResult::new(tt::Subtree::empty(DelimSpan { open: span, close: span }), e); } // FIXME: actually read the file here if the user asked for macro expansion let res = tt::Subtree { - delimiter: tt::Delimiter::unspecified(), + delimiter: tt::Delimiter::dummy_invisible(), token_trees: vec![tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal { text: r#"b"""#.into(), - span: tt::SpanData::DUMMY, + span, }))], }; ExpandResult::ok(res) @@ -589,10 +632,13 @@ fn include_str_expand( db: &dyn ExpandDatabase, arg_id: MacroCallId, tt: &tt::Subtree, + span: SpanData, ) -> ExpandResult { let path = match parse_string(tt) { Ok(it) => it, - Err(e) => return ExpandResult::new(tt::Subtree::empty(), e), + Err(e) => { + return ExpandResult::new(tt::Subtree::empty(DelimSpan { open: span, close: span }), e) + } }; // FIXME: we're not able to read excluded files (which is most of them because @@ -602,14 +648,14 @@ fn include_str_expand( let file_id = match relative_file(db, arg_id, &path, true) { Ok(file_id) => file_id, Err(_) => { - return ExpandResult::ok(quote!("")); + return ExpandResult::ok(quote!(span =>"")); } }; let text = db.file_text(file_id); let text = &*text; - ExpandResult::ok(quote!(#text)) + ExpandResult::ok(quote!(span =>#text)) } fn get_env_inner(db: &dyn ExpandDatabase, arg_id: MacroCallId, key: &str) -> Option { @@ -621,10 +667,13 @@ fn env_expand( db: &dyn ExpandDatabase, arg_id: MacroCallId, tt: &tt::Subtree, + span: SpanData, ) -> ExpandResult { let key = match parse_string(tt) { Ok(it) => it, - Err(e) => return ExpandResult::new(tt::Subtree::empty(), e), + Err(e) => { + return ExpandResult::new(tt::Subtree::empty(DelimSpan { open: span, close: span }), e) + } }; let mut err = None; @@ -641,7 +690,7 @@ fn env_expand( // `include!("foo.rs"), which might go to infinite loop "UNRESOLVED_ENV_VAR".to_string() }); - let expanded = quote! { #s }; + let expanded = quote! {span => #s }; ExpandResult { value: expanded, err } } @@ -650,15 +699,18 @@ fn option_env_expand( db: &dyn ExpandDatabase, arg_id: MacroCallId, tt: &tt::Subtree, + span: SpanData, ) -> ExpandResult { let key = match parse_string(tt) { Ok(it) => it, - Err(e) => return ExpandResult::new(tt::Subtree::empty(), e), + Err(e) => { + return ExpandResult::new(tt::Subtree::empty(DelimSpan { open: span, close: span }), e) + } }; // FIXME: Use `DOLLAR_CRATE` when that works in eager macros. let expanded = match get_env_inner(db, arg_id, &key) { - None => quote! { ::core::option::Option::None::<&str> }, - Some(s) => quote! { ::core::option::Option::Some(#s) }, + None => quote! {span => ::core::option::Option::None::<&str> }, + Some(s) => quote! {span => ::core::option::Option::Some(#s) }, }; ExpandResult::ok(expanded) diff --git a/crates/hir-expand/src/db.rs b/crates/hir-expand/src/db.rs index 0135d97f1cd14..7f77a2f51bab0 100644 --- a/crates/hir-expand/src/db.rs +++ b/crates/hir-expand/src/db.rs @@ -1,11 +1,6 @@ //! Defines database & queries for macro expansion. 
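One more convention worth calling out before the `db.rs` changes: every error path above now returns an empty but located subtree, `tt::Subtree::empty(DelimSpan { open: span, close: span })`, rather than a fully dummy one, so diagnostics can still point at the macro call. A reduced model of that shape, with simplified stand-ins for the `tt` types:

#[derive(Debug, Clone, Copy)]
struct Span(u32);

#[derive(Debug, Clone, Copy)]
struct DelimSpan { open: Span, close: Span }

#[derive(Debug)]
struct Subtree { delim: DelimSpan, token_trees: Vec<String> }

impl Subtree {
    // Even an empty subtree records where it came from.
    fn empty(span: DelimSpan) -> Self {
        Subtree { delim: span, token_trees: Vec::new() }
    }
}

// Mirrors the shape of e.g. include_str_expand's error path: the expansion is
// empty, but the subtree still spans the macro call site.
fn expand_or_empty(
    parsed: Result<Vec<String>, String>,
    span: Span,
) -> (Subtree, Option<String>) {
    let delim = DelimSpan { open: span, close: span };
    match parsed {
        Ok(token_trees) => (Subtree { delim, token_trees }, None),
        Err(e) => (Subtree::empty(delim), Some(e)),
    }
}

fn main() {
    let (tree, err) =
        expand_or_empty(Err("argument must be a string".into()), Span(3));
    assert!(tree.token_trees.is_empty() && err.is_some());
}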
-use ::tt::{SpanAnchor as _, SyntaxContext}; -use base_db::{ - salsa, - span::{SpanAnchor, SyntaxContextId}, - CrateId, Edition, FileId, SourceDatabase, -}; +use base_db::{salsa, span::SyntaxContextId, CrateId, Edition, FileId, SourceDatabase}; use either::Either; use limit::Limit; use mbe::{syntax_node_to_token_tree, ValueResult}; @@ -53,7 +48,7 @@ impl DeclarativeMacroExpander { ) -> ExpandResult { match self.mac.err() { Some(e) => ExpandResult::new( - tt::Subtree::empty(), + tt::Subtree::empty(tt::DelimSpan::DUMMY), ExpandError::other(format!("invalid macro definition: {e}")), ), None => self @@ -66,7 +61,7 @@ impl DeclarativeMacroExpander { pub fn expand_unhygienic(&self, tt: tt::Subtree) -> ExpandResult { match self.mac.err() { Some(e) => ExpandResult::new( - tt::Subtree::empty(), + tt::Subtree::empty(tt::DelimSpan::DUMMY), ExpandError::other(format!("invalid macro definition: {e}")), ), None => self.mac.expand(&tt, |_| ()).map_err(Into::into), @@ -191,7 +186,7 @@ pub fn expand_speculative( ) -> Option<(SyntaxNode, SyntaxToken)> { let loc = db.lookup_intern_macro_call(actual_macro_call); - let span_map = RealSpanMap::absolute(SpanAnchor::DUMMY.file_id); + let span_map = RealSpanMap::absolute(FileId::BOGUS); let span_map = SpanMapRef::RealSpanMap(&span_map); // Build the subtree and token mapping for the speculative args @@ -235,7 +230,7 @@ pub fn expand_speculative( match attr.token_tree() { Some(token_tree) => { let mut tree = syntax_node_to_token_tree(token_tree.syntax(), span_map); - tree.delimiter = tt::Delimiter::UNSPECIFIED; + tree.delimiter = tt::Delimiter::DUMMY_INVISIBLE; Some(tree) } @@ -249,7 +244,7 @@ pub fn expand_speculative( // Otherwise the expand query will fetch the non speculative attribute args and pass those instead. let mut speculative_expansion = match loc.def.kind { MacroDefKind::ProcMacro(expander, ..) => { - tt.delimiter = tt::Delimiter::UNSPECIFIED; + tt.delimiter = tt::Delimiter::DUMMY_INVISIBLE; let call_site = loc.span(db); expander.expand( db, @@ -263,7 +258,7 @@ pub fn expand_speculative( ) } MacroDefKind::BuiltInAttr(BuiltinAttrExpander::Derive, _) => { - pseudo_derive_attr_expansion(&tt, attr_arg.as_ref()?) + pseudo_derive_attr_expansion(&tt, attr_arg.as_ref()?, loc.call_site) } MacroDefKind::BuiltInDerive(expander, ..) => { // this cast is a bit sus, can we avoid losing the typedness here? 
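The next hunk swaps the hand-assembled `tt::SpanData { ..., anchor: SpanAnchor::DUMMY, ctx: SyntaxContextId::DUMMY }` for a real lookup, `span_map.span_for_range(token_to_map.text_range())`. The idea in isolation, with a deliberately simplified map (the real `RealSpanMap` tracks anchors per file and is more involved; this sketch assumes a non-empty map anchored at offset 0):

use std::ops::Range;

#[derive(Debug, Clone, Copy, PartialEq)]
struct Span(u32);

// A toy span map: (start offset, span) pairs sorted by offset. Lookup derives
// the span for a text range from recorded anchors instead of fabricating
// DUMMY values at the use site.
struct SpanMap(Vec<(usize, Span)>);

impl SpanMap {
    fn span_for_range(&self, range: Range<usize>) -> Span {
        // Index of the last entry starting at or before the range start.
        let idx = self.0.partition_point(|&(start, _)| start <= range.start);
        self.0[idx.saturating_sub(1)].1
    }
}

fn main() {
    let map = SpanMap(vec![(0, Span(1)), (10, Span(2)), (25, Span(3))]);
    assert_eq!(map.span_for_range(12..15), Span(2));
}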
@@ -286,11 +281,7 @@ pub fn expand_speculative( let syntax_node = node.syntax_node(); let token = rev_tmap - .ranges_with_span(tt::SpanData { - range: token_to_map.text_range(), - anchor: SpanAnchor::DUMMY, - ctx: SyntaxContextId::DUMMY, - }) + .ranges_with_span(span_map.span_for_range(token_to_map.text_range())) .filter_map(|range| syntax_node.covering_element(range).into_token()) .min_by_key(|t| { // prefer tokens of the same kind and text @@ -453,7 +444,7 @@ fn macro_arg( if loc.def.is_proc_macro() { // proc macros expect their inputs without parentheses, MBEs expect it with them included - tt.delimiter = tt::Delimiter::UNSPECIFIED; + tt.delimiter = tt::Delimiter::DUMMY_INVISIBLE; } if matches!(loc.def.kind, MacroDefKind::BuiltInEager(..)) { @@ -611,7 +602,7 @@ fn macro_expand( let Some((macro_arg, undo_info)) = value else { return ExpandResult { value: Arc::new(tt::Subtree { - delimiter: tt::Delimiter::UNSPECIFIED, + delimiter: tt::Delimiter::DUMMY_INVISIBLE, token_trees: Vec::new(), }), // FIXME: We should make sure to enforce an invariant that invalid macro @@ -683,7 +674,7 @@ fn expand_proc_macro(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult ExpandResult Some(&**attr_args), + MacroCallKind::Attr { attr_args: Some(attr_args), .. } => Some(&**attr_args), _ => None, }; @@ -749,7 +740,7 @@ fn check_tt_count(tt: &tt::Subtree) -> Result<(), ExpandResult> if TOKEN_LIMIT.check(count).is_err() { Err(ExpandResult { value: Arc::new(tt::Subtree { - delimiter: tt::Delimiter::UNSPECIFIED, + delimiter: tt::Delimiter::DUMMY_INVISIBLE, token_trees: vec![], }), err: Some(ExpandError::other(format!( diff --git a/crates/hir-expand/src/eager.rs b/crates/hir-expand/src/eager.rs index deea59b93b621..aa537af14393d 100644 --- a/crates/hir-expand/src/eager.rs +++ b/crates/hir-expand/src/eager.rs @@ -18,10 +18,7 @@ //! //! //! See the full discussion : -use base_db::{ - span::{SpanAnchor, SyntaxContextId}, - CrateId, -}; +use base_db::{span::SyntaxContextId, CrateId, FileId}; use rustc_hash::FxHashMap; use syntax::{ted, Parse, SyntaxNode, TextRange, TextSize, WalkEvent}; use triomphe::Arc; @@ -79,12 +76,10 @@ pub fn expand_eager_macro_input( }; // FIXME: Spans! - let mut subtree = mbe::syntax_node_to_token_tree( - &expanded_eager_input, - RealSpanMap::absolute(::DUMMY.file_id), - ); + let mut subtree = + mbe::syntax_node_to_token_tree(&expanded_eager_input, RealSpanMap::absolute(FileId::BOGUS)); - subtree.delimiter = crate::tt::Delimiter::UNSPECIFIED; + subtree.delimiter = crate::tt::Delimiter::DUMMY_INVISIBLE; let loc = MacroCallLoc { def, diff --git a/crates/hir-expand/src/lib.rs b/crates/hir-expand/src/lib.rs index 491c5e638ee15..5915fe6e34465 100644 --- a/crates/hir-expand/src/lib.rs +++ b/crates/hir-expand/src/lib.rs @@ -61,6 +61,7 @@ pub mod tt { pub use tt::{DelimiterKind, Spacing, Span, SpanAnchor}; pub type Delimiter = ::tt::Delimiter; + pub type DelimSpan = ::tt::DelimSpan; pub type Subtree = ::tt::Subtree; pub type Leaf = ::tt::Leaf; pub type Literal = ::tt::Literal; @@ -160,7 +161,7 @@ pub enum MacroCallKind { }, Attr { ast_id: AstId, - attr_args: Arc, + attr_args: Option>, /// Syntactical index of the invoking `#[attribute]`. /// /// Outer attributes are counted first, then inner attributes. 
This does not support @@ -699,7 +700,7 @@ impl ExpansionInfo { let (macro_arg, _) = db.macro_arg(macro_file.macro_call_id).value.unwrap_or_else(|| { ( Arc::new(tt::Subtree { - delimiter: tt::Delimiter::UNSPECIFIED, + delimiter: tt::Delimiter::DUMMY_INVISIBLE, token_trees: Vec::new(), }), SyntaxFixupUndoInfo::NONE, diff --git a/crates/hir-expand/src/proc_macro.rs b/crates/hir-expand/src/proc_macro.rs index 04b5b7b0b6a3e..ccae4c288e65c 100644 --- a/crates/hir-expand/src/proc_macro.rs +++ b/crates/hir-expand/src/proc_macro.rs @@ -38,9 +38,10 @@ impl ProcMacroExpander { mixed_site: SpanData, ) -> ExpandResult { match self.proc_macro_id { - ProcMacroId(DUMMY_ID) => { - ExpandResult::new(tt::Subtree::empty(), ExpandError::UnresolvedProcMacro(def_crate)) - } + ProcMacroId(DUMMY_ID) => ExpandResult::new( + tt::Subtree::empty(tt::DelimSpan { open: call_site, close: call_site }), + ExpandError::UnresolvedProcMacro(def_crate), + ), ProcMacroId(id) => { let proc_macros = db.proc_macros(); let proc_macros = match proc_macros.get(&def_crate) { @@ -48,7 +49,7 @@ impl ProcMacroExpander { Some(Err(_)) | None => { never!("Non-dummy expander even though there are no proc macros"); return ExpandResult::new( - tt::Subtree::empty(), + tt::Subtree::empty(tt::DelimSpan { open: call_site, close: call_site }), ExpandError::other("Internal error"), ); } @@ -62,7 +63,7 @@ impl ProcMacroExpander { id ); return ExpandResult::new( - tt::Subtree::empty(), + tt::Subtree::empty(tt::DelimSpan { open: call_site, close: call_site }), ExpandError::other("Internal error"), ); } @@ -82,9 +83,10 @@ impl ProcMacroExpander { ExpandResult { value: tt.clone(), err: Some(ExpandError::other(text)) } } ProcMacroExpansionError::System(text) - | ProcMacroExpansionError::Panic(text) => { - ExpandResult::new(tt::Subtree::empty(), ExpandError::other(text)) - } + | ProcMacroExpansionError::Panic(text) => ExpandResult::new( + tt::Subtree::empty(tt::DelimSpan { open: call_site, close: call_site }), + ExpandError::other(text), + ), }, } } diff --git a/crates/hir-expand/src/quote.rs b/crates/hir-expand/src/quote.rs index 44f20cbd92d25..069bcc3bd8e2f 100644 --- a/crates/hir-expand/src/quote.rs +++ b/crates/hir-expand/src/quote.rs @@ -1,5 +1,7 @@ //! A simplified version of quote-crate like quasi quote macro +use base_db::span::SpanData; + // A helper macro quote macro // FIXME: // 1. Not all puncts are handled @@ -8,109 +10,109 @@ #[doc(hidden)] #[macro_export] macro_rules! 
__quote { - () => { + ($span:ident) => { Vec::::new() }; - ( @SUBTREE $delim:ident $($tt:tt)* ) => { + ( @SUBTREE($span:ident) $delim:ident $($tt:tt)* ) => { { - let children = $crate::__quote!($($tt)*); + let children = $crate::__quote!($span $($tt)*); crate::tt::Subtree { delimiter: crate::tt::Delimiter { kind: crate::tt::DelimiterKind::$delim, - open: ::DUMMY, - close: ::DUMMY, + open: $span, + close: $span, }, token_trees: $crate::quote::IntoTt::to_tokens(children), } } }; - ( @PUNCT $first:literal ) => { + ( @PUNCT($span:ident) $first:literal ) => { { vec![ crate::tt::Leaf::Punct(crate::tt::Punct { char: $first, spacing: crate::tt::Spacing::Alone, - span: ::DUMMY, + span: $span, }).into() ] } }; - ( @PUNCT $first:literal, $sec:literal ) => { + ( @PUNCT($span:ident) $first:literal, $sec:literal ) => { { vec![ crate::tt::Leaf::Punct(crate::tt::Punct { char: $first, spacing: crate::tt::Spacing::Joint, - span: ::DUMMY, + span: $span, }).into(), crate::tt::Leaf::Punct(crate::tt::Punct { char: $sec, spacing: crate::tt::Spacing::Alone, - span: ::DUMMY, + span: $span, }).into() ] } }; // hash variable - ( # $first:ident $($tail:tt)* ) => { + ($span:ident # $first:ident $($tail:tt)* ) => { { - let token = $crate::quote::ToTokenTree::to_token($first); + let token = $crate::quote::ToTokenTree::to_token($first, $span); let mut tokens = vec![token.into()]; - let mut tail_tokens = $crate::quote::IntoTt::to_tokens($crate::__quote!($($tail)*)); + let mut tail_tokens = $crate::quote::IntoTt::to_tokens($crate::__quote!($span $($tail)*)); tokens.append(&mut tail_tokens); tokens } }; - ( ## $first:ident $($tail:tt)* ) => { + ($span:ident ## $first:ident $($tail:tt)* ) => { { - let mut tokens = $first.into_iter().map($crate::quote::ToTokenTree::to_token).collect::>(); - let mut tail_tokens = $crate::quote::IntoTt::to_tokens($crate::__quote!($($tail)*)); + let mut tokens = $first.into_iter().map(|it| $crate::quote::ToTokenTree::to_token(it, $span)).collect::>(); + let mut tail_tokens = $crate::quote::IntoTt::to_tokens($crate::__quote!($span $($tail)*)); tokens.append(&mut tail_tokens); tokens } }; // Brace - ( { $($tt:tt)* } ) => { $crate::__quote!(@SUBTREE Brace $($tt)*) }; + ($span:ident { $($tt:tt)* } ) => { $crate::__quote!(@SUBTREE($span) Brace $($tt)*) }; // Bracket - ( [ $($tt:tt)* ] ) => { $crate::__quote!(@SUBTREE Bracket $($tt)*) }; + ($span:ident [ $($tt:tt)* ] ) => { $crate::__quote!(@SUBTREE($span) Bracket $($tt)*) }; // Parenthesis - ( ( $($tt:tt)* ) ) => { $crate::__quote!(@SUBTREE Parenthesis $($tt)*) }; + ($span:ident ( $($tt:tt)* ) ) => { $crate::__quote!(@SUBTREE($span) Parenthesis $($tt)*) }; // Literal - ( $tt:literal ) => { vec![$crate::quote::ToTokenTree::to_token($tt).into()] }; + ($span:ident $tt:literal ) => { vec![$crate::quote::ToTokenTree::to_token($tt, $span).into()] }; // Ident - ( $tt:ident ) => { + ($span:ident $tt:ident ) => { vec![ { crate::tt::Leaf::Ident(crate::tt::Ident { text: stringify!($tt).into(), - span: ::DUMMY, + span: $span, }).into() }] }; // Puncts // FIXME: Not all puncts are handled - ( -> ) => {$crate::__quote!(@PUNCT '-', '>')}; - ( & ) => {$crate::__quote!(@PUNCT '&')}; - ( , ) => {$crate::__quote!(@PUNCT ',')}; - ( : ) => {$crate::__quote!(@PUNCT ':')}; - ( ; ) => {$crate::__quote!(@PUNCT ';')}; - ( :: ) => {$crate::__quote!(@PUNCT ':', ':')}; - ( . ) => {$crate::__quote!(@PUNCT '.')}; - ( < ) => {$crate::__quote!(@PUNCT '<')}; - ( > ) => {$crate::__quote!(@PUNCT '>')}; - ( ! 
) => {$crate::__quote!(@PUNCT '!')}; - - ( $first:tt $($tail:tt)+ ) => { + ($span:ident -> ) => {$crate::__quote!(@PUNCT($span) '-', '>')}; + ($span:ident & ) => {$crate::__quote!(@PUNCT($span) '&')}; + ($span:ident , ) => {$crate::__quote!(@PUNCT($span) ',')}; + ($span:ident : ) => {$crate::__quote!(@PUNCT($span) ':')}; + ($span:ident ; ) => {$crate::__quote!(@PUNCT($span) ';')}; + ($span:ident :: ) => {$crate::__quote!(@PUNCT($span) ':', ':')}; + ($span:ident . ) => {$crate::__quote!(@PUNCT($span) '.')}; + ($span:ident < ) => {$crate::__quote!(@PUNCT($span) '<')}; + ($span:ident > ) => {$crate::__quote!(@PUNCT($span) '>')}; + ($span:ident ! ) => {$crate::__quote!(@PUNCT($span) '!')}; + + ($span:ident $first:tt $($tail:tt)+ ) => { { - let mut tokens = $crate::quote::IntoTt::to_tokens($crate::__quote!($first)); - let mut tail_tokens = $crate::quote::IntoTt::to_tokens($crate::__quote!($($tail)*)); + let mut tokens = $crate::quote::IntoTt::to_tokens($crate::__quote!($span $first )); + let mut tail_tokens = $crate::quote::IntoTt::to_tokens($crate::__quote!($span $($tail)*)); tokens.append(&mut tail_tokens); tokens @@ -122,19 +124,22 @@ macro_rules! __quote { /// It probably should implement in proc-macro #[macro_export] macro_rules! quote { - ( $($tt:tt)* ) => { - $crate::quote::IntoTt::to_subtree($crate::__quote!($($tt)*)) + ($span:ident=> $($tt:tt)* ) => { + $crate::quote::IntoTt::to_subtree($crate::__quote!($span $($tt)*), $span) } } pub(crate) trait IntoTt { - fn to_subtree(self) -> crate::tt::Subtree; + fn to_subtree(self, span: SpanData) -> crate::tt::Subtree; fn to_tokens(self) -> Vec; } impl IntoTt for Vec { - fn to_subtree(self) -> crate::tt::Subtree { - crate::tt::Subtree { delimiter: crate::tt::Delimiter::unspecified(), token_trees: self } + fn to_subtree(self, span: SpanData) -> crate::tt::Subtree { + crate::tt::Subtree { + delimiter: crate::tt::Delimiter::invisible_spanned(span), + token_trees: self, + } } fn to_tokens(self) -> Vec { @@ -143,7 +148,7 @@ impl IntoTt for Vec { } impl IntoTt for crate::tt::Subtree { - fn to_subtree(self) -> crate::tt::Subtree { + fn to_subtree(self, _: SpanData) -> crate::tt::Subtree { self } @@ -153,39 +158,39 @@ impl IntoTt for crate::tt::Subtree { } pub(crate) trait ToTokenTree { - fn to_token(self) -> crate::tt::TokenTree; + fn to_token(self, span: SpanData) -> crate::tt::TokenTree; } impl ToTokenTree for crate::tt::TokenTree { - fn to_token(self) -> crate::tt::TokenTree { + fn to_token(self, _: SpanData) -> crate::tt::TokenTree { self } } impl ToTokenTree for &crate::tt::TokenTree { - fn to_token(self) -> crate::tt::TokenTree { + fn to_token(self, _: SpanData) -> crate::tt::TokenTree { self.clone() } } impl ToTokenTree for crate::tt::Subtree { - fn to_token(self) -> crate::tt::TokenTree { + fn to_token(self, _: SpanData) -> crate::tt::TokenTree { self.into() } } macro_rules! impl_to_to_tokentrees { - ($($ty:ty => $this:ident $im:block);*) => { + ($($span:ident: $ty:ty => $this:ident $im:block);*) => { $( impl ToTokenTree for $ty { - fn to_token($this) -> crate::tt::TokenTree { + fn to_token($this, $span: SpanData) -> crate::tt::TokenTree { let leaf: crate::tt::Leaf = $im.into(); leaf.into() } } impl ToTokenTree for &$ty { - fn to_token($this) -> crate::tt::TokenTree { + fn to_token($this, $span: SpanData) -> crate::tt::TokenTree { let leaf: crate::tt::Leaf = $im.clone().into(); leaf.into() } @@ -195,41 +200,45 @@ macro_rules! impl_to_to_tokentrees { } impl_to_to_tokentrees! 
{ - u32 => self { crate::tt::Literal{text: self.to_string().into(), span: ::DUMMY} }; - usize => self { crate::tt::Literal{text: self.to_string().into(), span: ::DUMMY} }; - i32 => self { crate::tt::Literal{text: self.to_string().into(), span: ::DUMMY} }; - bool => self { crate::tt::Ident{text: self.to_string().into(), span: ::DUMMY} }; - crate::tt::Leaf => self { self }; - crate::tt::Literal => self { self }; - crate::tt::Ident => self { self }; - crate::tt::Punct => self { self }; - &str => self { crate::tt::Literal{text: format!("\"{}\"", self.escape_default()).into(), span: ::DUMMY}}; - String => self { crate::tt::Literal{text: format!("\"{}\"", self.escape_default()).into(), span: ::DUMMY}} + span: u32 => self { crate::tt::Literal{text: self.to_string().into(), span} }; + span: usize => self { crate::tt::Literal{text: self.to_string().into(), span} }; + span: i32 => self { crate::tt::Literal{text: self.to_string().into(), span} }; + span: bool => self { crate::tt::Ident{text: self.to_string().into(), span} }; + _span: crate::tt::Leaf => self { self }; + _span: crate::tt::Literal => self { self }; + _span: crate::tt::Ident => self { self }; + _span: crate::tt::Punct => self { self }; + span: &str => self { crate::tt::Literal{text: format!("\"{}\"", self.escape_default()).into(), span}}; + span: String => self { crate::tt::Literal{text: format!("\"{}\"", self.escape_default()).into(), span}} } #[cfg(test)] mod tests { + use crate::tt; + use ::tt::Span; use expect_test::expect; + const DUMMY: tt::SpanData = tt::SpanData::DUMMY; + #[test] fn test_quote_delimiters() { - assert_eq!(quote!({}).to_string(), "{}"); - assert_eq!(quote!(()).to_string(), "()"); - assert_eq!(quote!([]).to_string(), "[]"); + assert_eq!(quote!(DUMMY =>{}).to_string(), "{}"); + assert_eq!(quote!(DUMMY =>()).to_string(), "()"); + assert_eq!(quote!(DUMMY =>[]).to_string(), "[]"); } #[test] fn test_quote_idents() { - assert_eq!(quote!(32).to_string(), "32"); - assert_eq!(quote!(struct).to_string(), "struct"); + assert_eq!(quote!(DUMMY =>32).to_string(), "32"); + assert_eq!(quote!(DUMMY =>struct).to_string(), "struct"); } #[test] fn test_quote_hash_simple_literal() { let a = 20; - assert_eq!(quote!(#a).to_string(), "20"); + assert_eq!(quote!(DUMMY =>#a).to_string(), "20"); let s: String = "hello".into(); - assert_eq!(quote!(#s).to_string(), "\"hello\""); + assert_eq!(quote!(DUMMY =>#s).to_string(), "\"hello\""); } fn mk_ident(name: &str) -> crate::tt::Ident { @@ -243,7 +252,7 @@ mod tests { fn test_quote_hash_token_tree() { let a = mk_ident("hello"); - let quoted = quote!(#a); + let quoted = quote!(DUMMY =>#a); assert_eq!(quoted.to_string(), "hello"); let t = format!("{quoted:?}"); expect![[r#" @@ -255,7 +264,7 @@ mod tests { fn test_quote_simple_derive_copy() { let name = mk_ident("Foo"); - let quoted = quote! { + let quoted = quote! {DUMMY => impl Clone for #name { fn clone(&self) -> Self { Self {} @@ -275,7 +284,8 @@ mod tests { // } let struct_name = mk_ident("Foo"); let fields = [mk_ident("name"), mk_ident("id")]; - let fields = fields.iter().flat_map(|it| quote!(#it: self.#it.clone(), ).token_trees); + let fields = + fields.iter().flat_map(|it| quote!(DUMMY =>#it: self.#it.clone(), ).token_trees); let list = crate::tt::Subtree { delimiter: crate::tt::Delimiter { @@ -286,7 +296,7 @@ mod tests { token_trees: fields.collect(), }; - let quoted = quote! { + let quoted = quote! 
{DUMMY => impl Clone for #struct_name { fn clone(&self) -> Self { Self #list diff --git a/crates/load-cargo/src/lib.rs b/crates/load-cargo/src/lib.rs index ed4175c458301..db9654220dd74 100644 --- a/crates/load-cargo/src/lib.rs +++ b/crates/load-cargo/src/lib.rs @@ -16,6 +16,7 @@ use ide_db::{ use itertools::Itertools; use proc_macro_api::{MacroDylib, ProcMacroServer}; use project_model::{CargoConfig, PackageRoot, ProjectManifest, ProjectWorkspace}; +use tt::DelimSpan; use vfs::{file_set::FileSetConfig, loader::Handle, AbsPath, AbsPathBuf, VfsPath}; pub struct LoadCargoConfig { @@ -417,11 +418,11 @@ impl ProcMacroExpander for EmptyExpander { _: &tt::Subtree, _: Option<&tt::Subtree>, _: &Env, - _: SpanData, + call_site: SpanData, _: SpanData, _: SpanData, ) -> Result, ProcMacroExpansionError> { - Ok(tt::Subtree::empty()) + Ok(tt::Subtree::empty(DelimSpan { open: call_site, close: call_site })) } } diff --git a/crates/mbe/src/expander.rs b/crates/mbe/src/expander.rs index 487e8b35980db..0e755f69bf7d3 100644 --- a/crates/mbe/src/expander.rs +++ b/crates/mbe/src/expander.rs @@ -49,7 +49,7 @@ pub(crate) fn expand_rules( ExpandResult { value, err: match_.err.or(transcribe_err) } } else { ExpandResult::new( - tt::Subtree { delimiter: tt::Delimiter::UNSPECIFIED, token_trees: vec![] }, + tt::Subtree { delimiter: tt::Delimiter::DUMMY_INVISIBLE, token_trees: vec![] }, ExpandError::NoMatchingRule, ) } diff --git a/crates/mbe/src/expander/matcher.rs b/crates/mbe/src/expander/matcher.rs index 14b32599091e4..5e1ceacf12c36 100644 --- a/crates/mbe/src/expander/matcher.rs +++ b/crates/mbe/src/expander/matcher.rs @@ -76,7 +76,8 @@ impl Bindings { fn push_optional(&mut self, name: &SmolStr) { // FIXME: Do we have a better way to represent an empty token ? // Insert an empty subtree for empty token - let tt = tt::Subtree { delimiter: tt::Delimiter::UNSPECIFIED, token_trees: vec![] }.into(); + let tt = + tt::Subtree { delimiter: tt::Delimiter::DUMMY_INVISIBLE, token_trees: vec![] }.into(); self.inner.insert(name.clone(), Binding::Fragment(Fragment::Tokens(tt))); } @@ -816,7 +817,7 @@ fn match_meta_var( match neg { None => lit.into(), Some(neg) => tt::TokenTree::Subtree(tt::Subtree { - delimiter: tt::Delimiter::unspecified(), + delimiter: tt::Delimiter::dummy_invisible(), token_trees: vec![neg, lit.into()], }), } @@ -849,7 +850,7 @@ impl MetaTemplate { OpDelimitedIter { inner: &self.0, idx: 0, - delimited: delimited.unwrap_or(tt::Delimiter::UNSPECIFIED), + delimited: delimited.unwrap_or(tt::Delimiter::DUMMY_INVISIBLE), } } } @@ -947,7 +948,7 @@ impl TtIter<'_, S> { let puncts = self.expect_glued_punct()?; let token_trees = puncts.into_iter().map(|p| tt::Leaf::Punct(p).into()).collect(); Ok(tt::TokenTree::Subtree(tt::Subtree { - delimiter: tt::Delimiter::unspecified(), + delimiter: tt::Delimiter::dummy_invisible(), token_trees, })) } @@ -964,7 +965,7 @@ impl TtIter<'_, S> { let ident = self.expect_ident_or_underscore()?; Ok(tt::Subtree { - delimiter: tt::Delimiter::unspecified(), + delimiter: tt::Delimiter::dummy_invisible(), token_trees: vec![ tt::Leaf::Punct(*punct).into(), tt::Leaf::Ident(ident.clone()).into(), diff --git a/crates/mbe/src/expander/transcriber.rs b/crates/mbe/src/expander/transcriber.rs index 8aedd73140635..40f683d846268 100644 --- a/crates/mbe/src/expander/transcriber.rs +++ b/crates/mbe/src/expander/transcriber.rs @@ -88,7 +88,7 @@ impl Bindings { // FIXME: Meta and Item should get proper defaults MetaVarKind::Meta | MetaVarKind::Item | MetaVarKind::Tt | MetaVarKind::Vis => { 
Fragment::Tokens(tt::TokenTree::Subtree(tt::Subtree { - delimiter: tt::Delimiter::UNSPECIFIED, + delimiter: tt::Delimiter::DUMMY_INVISIBLE, token_trees: vec![], })) } @@ -292,7 +292,7 @@ fn expand_subtree( let tts = arena.drain(start_elements..).collect(); ExpandResult { value: tt::Subtree { - delimiter: delimiter.unwrap_or_else(tt::Delimiter::unspecified), + delimiter: delimiter.unwrap_or_else(tt::Delimiter::dummy_invisible), token_trees: tts, }, err, @@ -325,7 +325,7 @@ fn expand_var( // ``` // We just treat it a normal tokens let tt = tt::Subtree { - delimiter: tt::Delimiter::UNSPECIFIED, + delimiter: tt::Delimiter::DUMMY_INVISIBLE, token_trees: vec![ tt::Leaf::from(tt::Punct { char: '$', spacing: tt::Spacing::Alone, span: id }) .into(), @@ -336,7 +336,10 @@ fn expand_var( ExpandResult::ok(Fragment::Tokens(tt)) } Err(e) => ExpandResult { - value: Fragment::Tokens(tt::TokenTree::Subtree(tt::Subtree::empty())), + value: Fragment::Tokens(tt::TokenTree::Subtree(tt::Subtree::empty(tt::DelimSpan { + open: S::DUMMY, + close: S::DUMMY, + }))), err: Some(e), }, } @@ -378,8 +381,11 @@ fn expand_repeat( ); return ExpandResult { value: Fragment::Tokens( - tt::Subtree { delimiter: tt::Delimiter::unspecified(), token_trees: vec![] } - .into(), + tt::Subtree { + delimiter: tt::Delimiter::dummy_invisible(), + token_trees: vec![], + } + .into(), ), err: Some(ExpandError::LimitExceeded), }; @@ -390,7 +396,7 @@ fn expand_repeat( continue; } - t.delimiter = tt::Delimiter::UNSPECIFIED; + t.delimiter = tt::Delimiter::DUMMY_INVISIBLE; push_subtree(&mut buf, t); if let Some(sep) = separator { @@ -424,7 +430,7 @@ fn expand_repeat( // Check if it is a single token subtree without any delimiter // e.g {Delimiter:None> ['>'] /Delimiter:None>} - let tt = tt::Subtree { delimiter: tt::Delimiter::UNSPECIFIED, token_trees: buf }.into(); + let tt = tt::Subtree { delimiter: tt::Delimiter::DUMMY_INVISIBLE, token_trees: buf }.into(); if RepeatKind::OneOrMore == kind && counter == 0 { return ExpandResult { diff --git a/crates/mbe/src/lib.rs b/crates/mbe/src/lib.rs index c19112b4c5404..10eb59bb83524 100644 --- a/crates/mbe/src/lib.rs +++ b/crates/mbe/src/lib.rs @@ -34,9 +34,9 @@ pub use tt::{Delimiter, DelimiterKind, Punct, SyntaxContext}; pub use crate::{ syntax_bridge::{ - map_from_syntax_node, parse_exprs_with_sep, parse_to_token_tree, - parse_to_token_tree_static_span, syntax_node_to_token_tree, - syntax_node_to_token_tree_modified, token_tree_to_syntax_node, SpanMapper, + parse_exprs_with_sep, parse_to_token_tree, parse_to_token_tree_static_span, + syntax_node_to_token_tree, syntax_node_to_token_tree_modified, token_tree_to_syntax_node, + SpanMapper, }, token_map::TokenMap, }; diff --git a/crates/mbe/src/syntax_bridge.rs b/crates/mbe/src/syntax_bridge.rs index be5eafd014aff..5722a5bd8eb04 100644 --- a/crates/mbe/src/syntax_bridge.rs +++ b/crates/mbe/src/syntax_bridge.rs @@ -4,7 +4,7 @@ use rustc_hash::{FxHashMap, FxHashSet}; use stdx::{never, non_empty_vec::NonEmptyVec}; use syntax::{ ast::{self, make::tokens::doc_comment}, - AstToken, NodeOrToken, Parse, PreorderWithTokens, SmolStr, SyntaxElement, SyntaxKind, + AstToken, Parse, PreorderWithTokens, SmolStr, SyntaxElement, SyntaxKind, SyntaxKind::*, SyntaxNode, SyntaxToken, SyntaxTreeBuilder, TextRange, TextSize, WalkEvent, T, }; @@ -142,30 +142,10 @@ where tree_sink.finish() } -pub fn map_from_syntax_node( - node: &SyntaxNode, - anchor: Anchor, - anchor_offset: TextSize, -) -> TokenMap> -where - Anchor: Copy, - SpanData: Span, - Ctx: SyntaxContext, -{ - let mut map = 
TokenMap::empty(); - node.descendants_with_tokens().filter_map(NodeOrToken::into_token).for_each(|t| { - map.push( - t.text_range().start(), - SpanData { range: t.text_range() - anchor_offset, anchor, ctx: Ctx::DUMMY }, - ); - }); - map.finish(); - map -} - /// Convert a string to a `TokenTree` pub fn parse_to_token_tree( anchor: Anchor, + ctx: Ctx, text: &str, ) -> Option>> where @@ -177,7 +157,7 @@ where if lexed.errors().next().is_some() { return None; } - let mut conv = RawConverter { lexed, pos: 0, anchor }; + let mut conv = RawConverter { lexed, pos: 0, anchor, ctx }; Some(convert_tokens(&mut conv)) } @@ -220,7 +200,7 @@ pub fn parse_exprs_with_sep(tt: &tt::Subtree, sep: char) -> Vec, S: Span, { - let entry = tt::Subtree { delimiter: tt::Delimiter::UNSPECIFIED, token_trees: vec![] }; + let entry = tt::Subtree { delimiter: tt::Delimiter::DUMMY_INVISIBLE, token_trees: vec![] }; let mut stack = NonEmptyVec::new(entry); while let Some((token, abs_range)) = conv.bump() { @@ -463,10 +443,11 @@ fn convert_doc_comment( } /// A raw token (straight from lexer) converter -struct RawConverter<'a, Anchor> { +struct RawConverter<'a, Anchor, Ctx> { lexed: parser::LexedStr<'a>, pos: usize, anchor: Anchor, + ctx: Ctx, } /// A raw token (straight from lexer) converter that gives every token the same span. struct StaticRawConverter<'a, S> { @@ -499,16 +480,16 @@ trait TokenConverter: Sized { fn span_for(&self, range: TextRange) -> S; } -impl SrcToken, S> for usize { - fn kind(&self, ctx: &RawConverter<'_, Anchor>) -> SyntaxKind { +impl SrcToken, S> for usize { + fn kind(&self, ctx: &RawConverter<'_, Anchor, Ctx>) -> SyntaxKind { ctx.lexed.kind(*self) } - fn to_char(&self, ctx: &RawConverter<'_, Anchor>) -> Option { + fn to_char(&self, ctx: &RawConverter<'_, Anchor, Ctx>) -> Option { ctx.lexed.text(*self).chars().next() } - fn to_text(&self, ctx: &RawConverter<'_, Anchor>) -> SmolStr { + fn to_text(&self, ctx: &RawConverter<'_, Anchor, Ctx>) -> SmolStr { ctx.lexed.text(*self).into() } } @@ -528,7 +509,7 @@ impl SrcToken, S> for usize { } impl TokenConverter> - for RawConverter<'_, Anchor> + for RawConverter<'_, Anchor, Ctx> where SpanData: Span, { @@ -563,7 +544,7 @@ where } fn span_for(&self, range: TextRange) -> SpanData { - SpanData { range, anchor: self.anchor, ctx: Ctx::DUMMY } + SpanData { range, anchor: self.anchor, ctx: self.ctx } } } diff --git a/crates/mbe/src/tt_iter.rs b/crates/mbe/src/tt_iter.rs index 44fbbcfc2068b..595691b177368 100644 --- a/crates/mbe/src/tt_iter.rs +++ b/crates/mbe/src/tt_iter.rs @@ -175,7 +175,7 @@ impl<'a, S: Span> TtIter<'a, S> { let res = match res.len() { 0 | 1 => res.pop(), _ => Some(tt::TokenTree::Subtree(tt::Subtree { - delimiter: tt::Delimiter::unspecified(), + delimiter: tt::Delimiter::DUMMY_INVISIBLE, token_trees: res, })), }; diff --git a/crates/proc-macro-api/src/msg.rs b/crates/proc-macro-api/src/msg.rs index ddac514ff7094..dd882d82fb64a 100644 --- a/crates/proc-macro-api/src/msg.rs +++ b/crates/proc-macro-api/src/msg.rs @@ -62,12 +62,14 @@ pub struct ExpandMacro { pub current_dir: Option, /// marker for serde skip stuff #[serde(skip_serializing_if = "ExpnGlobals::skip_serializing_if")] + #[serde(default)] pub has_global_spans: ExpnGlobals, } -#[derive(Debug, Serialize, Deserialize)] +#[derive(Default, Debug, Serialize, Deserialize)] pub struct ExpnGlobals { #[serde(skip_serializing)] + #[serde(default)] pub serialize: bool, pub def_site: usize, pub call_site: usize, diff --git a/crates/proc-macro-api/src/msg/flat.rs b/crates/proc-macro-api/src/msg/flat.rs 
index 43840fa333d78..5835718628e55 100644 --- a/crates/proc-macro-api/src/msg/flat.rs +++ b/crates/proc-macro-api/src/msg/flat.rs @@ -56,16 +56,6 @@ impl std::fmt::Debug for TokenId { impl tt::Span for TokenId { const DUMMY: Self = TokenId(!0); - - type Anchor = (); - - fn anchor(self) -> Self::Anchor { - () - } - - fn mk(_: Self::Anchor, _: text_size::TextRange) -> Self { - Self::DUMMY - } } #[derive(Serialize, Deserialize, Debug)] diff --git a/crates/proc-macro-srv/src/server.rs b/crates/proc-macro-srv/src/server.rs index fc080eccc0f4a..54430e0d19051 100644 --- a/crates/proc-macro-srv/src/server.rs +++ b/crates/proc-macro-srv/src/server.rs @@ -426,8 +426,6 @@ impl LiteralFormatter { #[cfg(test)] mod tests { - use ::tt::Span; - use super::*; #[test] @@ -436,16 +434,16 @@ mod tests { token_trees: vec![ tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident { text: "struct".into(), - span: tt::TokenId::DUMMY, + span: tt::TokenId(0), })), tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident { text: "T".into(), - span: tt::TokenId::DUMMY, + span: tt::TokenId(0), })), tt::TokenTree::Subtree(tt::Subtree { delimiter: tt::Delimiter { - open: tt::TokenId::DUMMY, - close: tt::TokenId::DUMMY, + open: tt::TokenId(0), + close: tt::TokenId(0), kind: tt::DelimiterKind::Brace, }, token_trees: vec![], @@ -460,30 +458,30 @@ mod tests { fn test_ra_server_from_str() { let subtree_paren_a = tt::TokenTree::Subtree(tt::Subtree { delimiter: tt::Delimiter { - open: tt::TokenId::DUMMY, - close: tt::TokenId::DUMMY, + open: tt::TokenId(0), + close: tt::TokenId(0), kind: tt::DelimiterKind::Parenthesis, }, token_trees: vec![tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident { text: "a".into(), - span: tt::TokenId::DUMMY, + span: tt::TokenId(0), }))], }); - let t1 = TokenStream::from_str("(a)", tt::TokenId::DUMMY).unwrap(); + let t1 = TokenStream::from_str("(a)", tt::TokenId(0)).unwrap(); assert_eq!(t1.token_trees.len(), 1); assert_eq!(t1.token_trees[0], subtree_paren_a); - let t2 = TokenStream::from_str("(a);", tt::TokenId::DUMMY).unwrap(); + let t2 = TokenStream::from_str("(a);", tt::TokenId(0)).unwrap(); assert_eq!(t2.token_trees.len(), 2); assert_eq!(t2.token_trees[0], subtree_paren_a); - let underscore = TokenStream::from_str("_", tt::TokenId::DUMMY).unwrap(); + let underscore = TokenStream::from_str("_", tt::TokenId(0)).unwrap(); assert_eq!( underscore.token_trees[0], tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident { text: "_".into(), - span: tt::TokenId::DUMMY, + span: tt::TokenId(0), })) ); } diff --git a/crates/proc-macro-srv/src/tests/mod.rs b/crates/proc-macro-srv/src/tests/mod.rs index 04a0ae7bc7201..b04e3ca19ac1b 100644 --- a/crates/proc-macro-srv/src/tests/mod.rs +++ b/crates/proc-macro-srv/src/tests/mod.rs @@ -8,7 +8,7 @@ use expect_test::expect; #[test] fn test_derive_empty() { - assert_expand("DeriveEmpty", r#"struct S;"#, expect!["SUBTREE $$ 4294967295 4294967295"]); + assert_expand("DeriveEmpty", r#"struct S;"#, expect!["SUBTREE $$ 1 1"]); } #[test] @@ -17,12 +17,12 @@ fn test_derive_error() { "DeriveError", r#"struct S;"#, expect![[r##" - SUBTREE $$ 4294967295 4294967295 - IDENT compile_error 4294967295 - PUNCH ! [alone] 4294967295 - SUBTREE () 4294967295 4294967295 - LITERAL "#[derive(DeriveError)] struct S ;" 4294967295 - PUNCH ; [alone] 4294967295"##]], + SUBTREE $$ 1 1 + IDENT compile_error 1 + PUNCH ! 
[alone] 1 + SUBTREE () 1 1 + LITERAL "#[derive(DeriveError)] struct S ;" 1 + PUNCH ; [alone] 1"##]], ); } @@ -32,14 +32,14 @@ fn test_fn_like_macro_noop() { "fn_like_noop", r#"ident, 0, 1, []"#, expect![[r#" - SUBTREE $$ 4294967295 4294967295 - IDENT ident 4294967295 - PUNCH , [alone] 4294967295 - LITERAL 0 4294967295 - PUNCH , [alone] 4294967295 - LITERAL 1 4294967295 - PUNCH , [alone] 4294967295 - SUBTREE [] 4294967295 4294967295"#]], + SUBTREE $$ 1 1 + IDENT ident 1 + PUNCH , [alone] 1 + LITERAL 0 1 + PUNCH , [alone] 1 + LITERAL 1 1 + PUNCH , [alone] 1 + SUBTREE [] 1 1"#]], ); } @@ -49,10 +49,10 @@ fn test_fn_like_macro_clone_ident_subtree() { "fn_like_clone_tokens", r#"ident, []"#, expect![[r#" - SUBTREE $$ 4294967295 4294967295 - IDENT ident 4294967295 - PUNCH , [alone] 4294967295 - SUBTREE [] 4294967295 4294967295"#]], + SUBTREE $$ 1 1 + IDENT ident 1 + PUNCH , [alone] 1 + SUBTREE [] 1 1"#]], ); } @@ -62,8 +62,8 @@ fn test_fn_like_macro_clone_raw_ident() { "fn_like_clone_tokens", "r#async", expect![[r#" - SUBTREE $$ 4294967295 4294967295 - IDENT r#async 4294967295"#]], + SUBTREE $$ 1 1 + IDENT r#async 1"#]], ); } @@ -73,14 +73,14 @@ fn test_fn_like_mk_literals() { "fn_like_mk_literals", r#""#, expect![[r#" - SUBTREE $$ 4294967295 4294967295 - LITERAL b"byte_string" 4294967295 - LITERAL 'c' 4294967295 - LITERAL "string" 4294967295 - LITERAL 3.14f64 4294967295 - LITERAL 3.14 4294967295 - LITERAL 123i64 4294967295 - LITERAL 123 4294967295"#]], + SUBTREE $$ 1 1 + LITERAL b"byte_string" 1 + LITERAL 'c' 1 + LITERAL "string" 1 + LITERAL 3.14f64 1 + LITERAL 3.14 1 + LITERAL 123i64 1 + LITERAL 123 1"#]], ); } @@ -90,9 +90,9 @@ fn test_fn_like_mk_idents() { "fn_like_mk_idents", r#""#, expect![[r#" - SUBTREE $$ 4294967295 4294967295 - IDENT standard 4294967295 - IDENT r#raw 4294967295"#]], + SUBTREE $$ 1 1 + IDENT standard 1 + IDENT r#raw 1"#]], ); } @@ -102,17 +102,17 @@ fn test_fn_like_macro_clone_literals() { "fn_like_clone_tokens", r#"1u16, 2_u32, -4i64, 3.14f32, "hello bridge""#, expect![[r#" - SUBTREE $$ 4294967295 4294967295 - LITERAL 1u16 4294967295 - PUNCH , [alone] 4294967295 - LITERAL 2_u32 4294967295 - PUNCH , [alone] 4294967295 - PUNCH - [alone] 4294967295 - LITERAL 4i64 4294967295 - PUNCH , [alone] 4294967295 - LITERAL 3.14f32 4294967295 - PUNCH , [alone] 4294967295 - LITERAL "hello bridge" 4294967295"#]], + SUBTREE $$ 1 1 + LITERAL 1u16 1 + PUNCH , [alone] 1 + LITERAL 2_u32 1 + PUNCH , [alone] 1 + PUNCH - [alone] 1 + LITERAL 4i64 1 + PUNCH , [alone] 1 + LITERAL 3.14f32 1 + PUNCH , [alone] 1 + LITERAL "hello bridge" 1"#]], ); } @@ -126,12 +126,12 @@ fn test_attr_macro() { r#"mod m {}"#, r#"some arguments"#, expect![[r##" - SUBTREE $$ 4294967295 4294967295 - IDENT compile_error 4294967295 - PUNCH ! [alone] 4294967295 - SUBTREE () 4294967295 4294967295 - LITERAL "#[attr_error(some arguments)] mod m {}" 4294967295 - PUNCH ; [alone] 4294967295"##]], + SUBTREE $$ 1 1 + IDENT compile_error 1 + PUNCH ! 
[alone] 1 + SUBTREE () 1 1 + LITERAL "#[attr_error(some arguments)] mod m {}" 1 + PUNCH ; [alone] 1"##]], ); } diff --git a/crates/proc-macro-srv/src/tests/utils.rs b/crates/proc-macro-srv/src/tests/utils.rs index ccfefafb2cfc8..c12096d140c98 100644 --- a/crates/proc-macro-srv/src/tests/utils.rs +++ b/crates/proc-macro-srv/src/tests/utils.rs @@ -2,7 +2,6 @@ use expect_test::Expect; use proc_macro_api::msg::TokenId; -use tt::Span; use crate::{dylib, proc_macro_test_dylib_path, ProcMacroSrv}; @@ -25,7 +24,9 @@ pub fn assert_expand_attr(macro_name: &str, ra_fixture: &str, attr_args: &str, e } fn assert_expand_impl(macro_name: &str, input: &str, attr: Option<&str>, expect: Expect) { - let call_site = TokenId::DUMMY; + let def_site = TokenId(0); + let call_site = TokenId(1); + let mixed_site = TokenId(2); let path = proc_macro_test_dylib_path(); let expander = dylib::Expander::new(&path).unwrap(); let fixture = parse_string(input, call_site).unwrap(); @@ -36,9 +37,9 @@ fn assert_expand_impl(macro_name: &str, input: &str, attr: Option<&str>, expect: macro_name, &fixture.into_subtree(call_site), attr.as_ref(), - TokenId::DUMMY, - TokenId::DUMMY, - TokenId::DUMMY, + def_site, + call_site, + mixed_site, ) .unwrap(); expect.assert_eq(&format!("{res:?}")); diff --git a/crates/tt/src/lib.rs b/crates/tt/src/lib.rs index 7977d97797ad9..b1f2185162106 100644 --- a/crates/tt/src/lib.rs +++ b/crates/tt/src/lib.rs @@ -23,18 +23,11 @@ pub struct SpanData { } impl Span for SpanData { - type Anchor = Anchor; const DUMMY: Self = SpanData { range: TextRange::empty(TextSize::new(0)), anchor: Anchor::DUMMY, ctx: Ctx::DUMMY, }; - fn anchor(self) -> Self::Anchor { - self.anchor - } - fn mk(anchor: Self::Anchor, range: TextRange) -> Self { - SpanData { anchor, range, ctx: Ctx::DUMMY } - } } pub trait SpanAnchor: @@ -46,9 +39,6 @@ pub trait SpanAnchor: // FIXME: Get rid of this trait? 
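A note on the expectation churn in the test dumps above: the old `4294967295` literals were `TokenId::DUMMY`, i.e. `!0u32`. The fixture now pins small distinct ids per span kind (def site 0, call site 1, mixed site 2, as set up in `utils.rs` above), and expander output defaults to the call site, hence all the `1`s. A minimal check of that arithmetic:

fn main() {
    // TokenId::DUMMY was TokenId(!0); that is where the 4294967295 in the
    // old expectations came from.
    assert_eq!(!0u32, 4_294_967_295);
    // The new fixture allocates distinct ids instead:
    let (def_site, call_site, mixed_site) = (0u32, 1u32, 2u32);
    // Tokens produced by an expansion default to the call-site id, which is
    // why the updated SUBTREE/IDENT/LITERAL dumps all print 1.
    assert_eq!(call_site, 1);
    let _ = (def_site, mixed_site);
}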
pub trait Span: std::fmt::Debug + Copy + Sized + Eq { const DUMMY: Self; - type Anchor: Copy + fmt::Debug + Eq + std::hash::Hash; - fn anchor(self) -> Self::Anchor; - fn mk(anchor: Self::Anchor, range: TextRange) -> Self; } pub trait SyntaxContext: std::fmt::Debug + Copy + Sized + Eq { @@ -62,18 +52,30 @@ pub enum TokenTree { } impl_from!(Leaf, Subtree for TokenTree); impl TokenTree { - pub const fn empty() -> Self { - Self::Subtree(Subtree { delimiter: Delimiter::UNSPECIFIED, token_trees: vec![] }) + pub const fn empty(span: S) -> Self { + Self::Subtree(Subtree { + delimiter: Delimiter::invisible_spanned(span), + token_trees: vec![], + }) } pub fn subtree_or_wrap(self) -> Subtree { match self { TokenTree::Leaf(_) => { - Subtree { delimiter: Delimiter::UNSPECIFIED, token_trees: vec![self] } + Subtree { delimiter: Delimiter::DUMMY_INVISIBLE, token_trees: vec![self] } } TokenTree::Subtree(s) => s, } } + pub fn subtree_or_wrap2(self, span: DelimSpan) -> Subtree { + match self { + TokenTree::Leaf(_) => Subtree { + delimiter: Delimiter::invisible_delim_spanned(span), + token_trees: vec![self], + }, + TokenTree::Subtree(s) => s, + } + } } #[derive(Debug, Clone, PartialEq, Eq, Hash)] @@ -101,8 +103,8 @@ pub struct Subtree { } impl Subtree { - pub const fn empty() -> Self { - Subtree { delimiter: Delimiter::unspecified(), token_trees: vec![] } + pub const fn empty(span: DelimSpan) -> Self { + Subtree { delimiter: Delimiter::invisible_delim_spanned(span), token_trees: vec![] } } pub fn visit_ids(&mut self, f: &mut impl FnMut(S) -> S) { @@ -119,6 +121,16 @@ impl Subtree { } } +#[derive(Debug, Copy, Clone, PartialEq)] +pub struct DelimSpan { + pub open: S, + pub close: S, +} + +impl DelimSpan { + pub const DUMMY: Self = Self { open: S::DUMMY, close: S::DUMMY }; +} + #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] pub struct Delimiter { pub open: S, @@ -127,10 +139,23 @@ pub struct Delimiter { } impl Delimiter { - pub const UNSPECIFIED: Self = + pub const DUMMY_INVISIBLE: Self = Self { open: S::DUMMY, close: S::DUMMY, kind: DelimiterKind::Invisible }; - pub const fn unspecified() -> Self { - Self::UNSPECIFIED + + pub const fn dummy_invisible() -> Self { + Self::DUMMY_INVISIBLE + } + + pub const fn invisible_spanned(span: S) -> Self { + Delimiter { open: span, close: span, kind: DelimiterKind::Invisible } + } + + pub const fn invisible_delim_spanned(span: DelimSpan) -> Self { + Delimiter { open: span.open, close: span.close, kind: DelimiterKind::Invisible } + } + + pub fn delim_span(&self) -> DelimSpan { + DelimSpan { open: self.open, close: self.close } } } diff --git a/crates/vfs/src/lib.rs b/crates/vfs/src/lib.rs index 06004adad34a1..c6866bbfb9b4b 100644 --- a/crates/vfs/src/lib.rs +++ b/crates/vfs/src/lib.rs @@ -62,6 +62,11 @@ pub use paths::{AbsPath, AbsPathBuf}; #[derive(Copy, Clone, Debug, Ord, PartialOrd, Eq, PartialEq, Hash)] pub struct FileId(pub u32); +impl FileId { + /// Think twice about using this. If this ends up in a wrong place it will cause panics! 
+ pub const BOGUS: FileId = FileId(u32::MAX); +} + /// safe because `FileId` is a newtype of `u32` impl nohash_hasher::IsEnabled for FileId {} From c11737cd63da1893947d0d7d43ab4bfb0fdeb0ad Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Fri, 1 Dec 2023 14:58:57 +0100 Subject: [PATCH 19/80] Simplify include handling --- crates/hir-expand/src/builtin_fn_macro.rs | 55 +++++++++---------- crates/hir-expand/src/db.rs | 6 -- crates/hir-expand/src/lib.rs | 7 +-- crates/hir/src/db.rs | 4 +- crates/ide-db/src/apply_change.rs | 1 - .../src/handlers/unresolved_macro_call.rs | 16 ++++++ 6 files changed, 46 insertions(+), 43 deletions(-) diff --git a/crates/hir-expand/src/builtin_fn_macro.rs b/crates/hir-expand/src/builtin_fn_macro.rs index 726b9835369fa..74ca2f7ec44e1 100644 --- a/crates/hir-expand/src/builtin_fn_macro.rs +++ b/crates/hir-expand/src/builtin_fn_macro.rs @@ -17,7 +17,7 @@ use crate::{ hygiene::span_with_def_site_ctxt, name, quote, tt::{self, DelimSpan}, - EagerCallInfo, ExpandError, ExpandResult, HirFileIdExt, MacroCallId, MacroCallLoc, + ExpandError, ExpandResult, HirFileIdExt, MacroCallId, MacroCallLoc, }; macro_rules! register_builtin { @@ -575,36 +575,32 @@ fn parse_string(tt: &tt::Subtree) -> Result { fn include_expand( db: &dyn ExpandDatabase, arg_id: MacroCallId, - _tt: &tt::Subtree, + tt: &tt::Subtree, span: SpanData, ) -> ExpandResult { - match db.include_expand(arg_id) { - Ok((res, _)) => ExpandResult::ok(res.as_ref().clone()), - Err(e) => ExpandResult::new(tt::Subtree::empty(DelimSpan { open: span, close: span }), e), - } -} - -// FIXME: Check if this is still needed now after the token map rewrite -pub(crate) fn include_arg_to_tt( - db: &dyn ExpandDatabase, - arg_id: MacroCallId, -) -> Result<(triomphe::Arc, FileId), ExpandError> { - let loc = db.lookup_intern_macro_call(arg_id); - let Some(EagerCallInfo { arg, arg_id, .. }) = loc.eager.as_deref() else { - panic!("include_arg_to_tt called on non include macro call: {:?}", &loc.eager); + let path = match parse_string(tt) { + Ok(it) => it, + Err(e) => { + return ExpandResult::new(tt::Subtree::empty(DelimSpan { open: span, close: span }), e) + } }; - let path = parse_string(&arg)?; - let file_id = relative_file(db, *arg_id, &path, false)?; - - // why are we not going through a SyntaxNode here? 
- let subtree = parse_to_token_tree( + let file_id = match relative_file(db, arg_id, &path, false) { + Ok(file_id) => file_id, + Err(e) => { + return ExpandResult::new(tt::Subtree::empty(DelimSpan { open: span, close: span }), e); + } + }; + match parse_to_token_tree( SpanAnchor { file_id, ast_id: ROOT_ERASED_FILE_AST_ID }, - // FIXME SyntaxContextId::ROOT, &db.file_text(file_id), - ) - .ok_or(mbe::ExpandError::ConversionError)?; - Ok((triomphe::Arc::new(subtree), file_id)) + ) { + Some(it) => ExpandResult::ok(it), + None => ExpandResult::new( + tt::Subtree::empty(DelimSpan { open: span, close: span }), + ExpandError::other("failed to parse included file"), + ), + } } fn include_bytes_expand( @@ -613,9 +609,12 @@ fn include_bytes_expand( tt: &tt::Subtree, span: SpanData, ) -> ExpandResult { - if let Err(e) = parse_string(tt) { - return ExpandResult::new(tt::Subtree::empty(DelimSpan { open: span, close: span }), e); - } + let _path = match parse_string(tt) { + Ok(it) => it, + Err(e) => { + return ExpandResult::new(tt::Subtree::empty(DelimSpan { open: span, close: span }), e) + } + }; // FIXME: actually read the file here if the user asked for macro expansion let res = tt::Subtree { diff --git a/crates/hir-expand/src/db.rs b/crates/hir-expand/src/db.rs index 7f77a2f51bab0..00755b16e9ffd 100644 --- a/crates/hir-expand/src/db.rs +++ b/crates/hir-expand/src/db.rs @@ -142,12 +142,6 @@ pub trait ExpandDatabase: SourceDatabase { def_crate: CrateId, id: AstId, ) -> Arc; - - #[salsa::invoke(crate::builtin_fn_macro::include_arg_to_tt)] - fn include_expand( - &self, - arg_id: MacroCallId, - ) -> Result<(triomphe::Arc, base_db::FileId), ExpandError>; /// Special case of the previous query for procedural macros. We can't LRU /// proc macros, since they are not deterministic in general, and /// non-determinism breaks salsa in a very, very, very bad way. 
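The rewritten `include_expand` above drops the cached `include_expand` salsa query and expands directly, with each fallible step (`parse_string`, `relative_file`, `parse_to_token_tree`) degrading to an empty subtree that still carries the error. The following is a minimal, self-contained sketch of that recovery shape, not rust-analyzer's actual API: `Subtree`, `ExpandError`, and `ExpandResult` below are simplified stand-ins.

    // Sketch only: expansion never aborts, it degrades to an "empty but
    // usable" value that carries the error along, mirroring
    // `tt::Subtree::empty(..)` in the hunk above.
    #[derive(Debug, Default)]
    struct Subtree {
        token_trees: Vec<String>,
    }

    #[derive(Debug)]
    struct ExpandError(String);

    #[derive(Debug)]
    struct ExpandResult<T> {
        value: T,
        err: Option<ExpandError>,
    }

    impl<T> ExpandResult<T> {
        fn ok(value: T) -> Self {
            ExpandResult { value, err: None }
        }
        fn new(value: T, err: ExpandError) -> Self {
            ExpandResult { value, err: Some(err) }
        }
    }

    // Stand-in for the include pipeline: on failure, return the default
    // (empty) subtree plus the error instead of propagating a `Result`.
    fn include_like_expand(resolved: Result<&str, String>) -> ExpandResult<Subtree> {
        match resolved {
            Ok(text) => ExpandResult::ok(Subtree { token_trees: vec![text.to_owned()] }),
            Err(e) => ExpandResult::new(Subtree::default(), ExpandError(e)),
        }
    }

    fn main() {
        println!("{:?}", include_like_expand(Ok("mod m {}")));
        println!("{:?}", include_like_expand(Err("unresolved file".to_owned())));
    }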
diff --git a/crates/hir-expand/src/lib.rs b/crates/hir-expand/src/lib.rs index 5915fe6e34465..b5f5fdd22ee74 100644 --- a/crates/hir-expand/src/lib.rs +++ b/crates/hir-expand/src/lib.rs @@ -208,12 +208,7 @@ impl HirFileIdExt for HirFileId { match file_id.repr() { HirFileIdRepr::FileId(id) => break id, HirFileIdRepr::MacroFile(MacroFileId { macro_call_id }) => { - let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_call_id); - let is_include_expansion = loc.def.is_include() && loc.eager.is_some(); - file_id = match is_include_expansion.then(|| db.include_expand(macro_call_id)) { - Some(Ok((_, file))) => file.into(), - _ => loc.kind.file_id(), - } + file_id = db.lookup_intern_macro_call(macro_call_id).kind.file_id(); } } } diff --git a/crates/hir/src/db.rs b/crates/hir/src/db.rs index ff6f987aa1286..d98e3decd21ed 100644 --- a/crates/hir/src/db.rs +++ b/crates/hir/src/db.rs @@ -23,7 +23,7 @@ pub use hir_def::db::{ }; pub use hir_expand::db::{ AstIdMapQuery, DeclMacroExpanderQuery, ExpandDatabase, ExpandDatabaseStorage, - ExpandProcMacroQuery, IncludeExpandQuery, InternMacroCallQuery, InternSyntaxContextQuery, - MacroArgQuery, ParseMacroExpansionErrorQuery, ParseMacroExpansionQuery, RealSpanMapQuery, + ExpandProcMacroQuery, InternMacroCallQuery, InternSyntaxContextQuery, MacroArgQuery, + ParseMacroExpansionErrorQuery, ParseMacroExpansionQuery, RealSpanMapQuery, }; pub use hir_ty::db::*; diff --git a/crates/ide-db/src/apply_change.rs b/crates/ide-db/src/apply_change.rs index 67e686dad11ef..343be870c9eea 100644 --- a/crates/ide-db/src/apply_change.rs +++ b/crates/ide-db/src/apply_change.rs @@ -99,7 +99,6 @@ impl RootDatabase { hir::db::AstIdMapQuery hir::db::DeclMacroExpanderQuery hir::db::ExpandProcMacroQuery - hir::db::IncludeExpandQuery hir::db::InternMacroCallQuery hir::db::InternSyntaxContextQuery hir::db::MacroArgQuery diff --git a/crates/ide-diagnostics/src/handlers/unresolved_macro_call.rs b/crates/ide-diagnostics/src/handlers/unresolved_macro_call.rs index 33e7c2e37c3b0..edcfa073a0fb6 100644 --- a/crates/ide-diagnostics/src/handlers/unresolved_macro_call.rs +++ b/crates/ide-diagnostics/src/handlers/unresolved_macro_call.rs @@ -67,6 +67,22 @@ macro_rules! m { () => {} } } self::m!(); self::m2!(); //^^ error: unresolved macro `self::m2!` +"#, + ); + } + + #[test] + #[should_panic] // FIXME: https://github.com/rust-lang/rust-analyzer/issues/14968 + fn include_does_not_break_diagnostics() { + check_diagnostics( + r#" +//- minicore: include +//- /lib.rs crate:lib +include!("include-me.rs"); +//- /include-me.rs +/// long doc that pushes the diagnostic range beyond the first file's text length +#[err] +mod prim_never {} "#, ); } From efa67294ed7d3742d5177b6ff43f077325ba48be Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Fri, 1 Dec 2023 16:29:58 +0100 Subject: [PATCH 20/80] Fix eager macro input spans being discarded --- .../hir-def/src/macro_expansion_tests/mbe.rs | 60 +++++++-- crates/hir-expand/src/eager.rs | 123 +++++++----------- crates/hir-ty/src/tests.rs | 1 + crates/hir-ty/src/tests/macros.rs | 1 + crates/mbe/src/token_map.rs | 10 +- 5 files changed, 101 insertions(+), 94 deletions(-) diff --git a/crates/hir-def/src/macro_expansion_tests/mbe.rs b/crates/hir-def/src/macro_expansion_tests/mbe.rs index 9f0d3938b7ea0..c4d44eab66c40 100644 --- a/crates/hir-def/src/macro_expansion_tests/mbe.rs +++ b/crates/hir-def/src/macro_expansion_tests/mbe.rs @@ -93,52 +93,86 @@ fn eager_expands_with_unresolved_within() { r#" #[rustc_builtin_macro] #[macro_export] -macro_rules! 
format_args {} +macro_rules! concat {} +macro_rules! identity { + ($tt:tt) => { + $tt + } +} fn main(foo: ()) { - format_args!("{} {} {}", format_args!("{}", 0), foo, identity!(10), "bar") + concat!("hello", identity!("world"), unresolved!(), identity!("!")); } "#, expect![[r##" #[rustc_builtin_macro] #[macro_export] -macro_rules! format_args {} +macro_rules! concat {} +macro_rules! identity { + ($tt:tt) => { + $tt + } +} fn main(foo: ()) { - builtin #format_args ("{} {} {}", format_args!("{}", 0), foo, identity!(10), "bar") + /* error: unresolved macro unresolved */"helloworld!"; } "##]], ); } #[test] -fn token_mapping_eager() { +fn concat_spans() { check( r#" #[rustc_builtin_macro] #[macro_export] -macro_rules! format_args {} - +macro_rules! concat {} macro_rules! identity { - ($expr:expr) => { $expr }; + ($tt:tt) => { + $tt + } } fn main(foo: ()) { - format_args/*+spans+syntaxctxt*/!("{} {} {}", format_args!("{}", 0), foo, identity!(10), "bar") + #[rustc_builtin_macro] + #[macro_export] + macro_rules! concat {} + macro_rules! identity { + ($tt:tt) => { + $tt + } + } + + fn main(foo: ()) { + concat/*+spans+syntaxctxt*/!("hello", concat!("w", identity!("o")), identity!("rld"), unresolved!(), identity!("!")); + } } "#, expect![[r##" #[rustc_builtin_macro] #[macro_export] -macro_rules! format_args {} - +macro_rules! concat {} macro_rules! identity { - ($expr:expr) => { $expr }; + ($tt:tt) => { + $tt + } } fn main(foo: ()) { - builtin#FileId(0):3@23..118\3# ##FileId(0):3@23..118\3#format_args#FileId(0):3@23..118\3# (#FileId(0):3@56..57\0#"{} {} {}"#FileId(0):3@57..67\0#,#FileId(0):3@67..68\0# format_args#FileId(0):3@69..80\0#!#FileId(0):3@80..81\0#(#FileId(0):3@81..82\0#"{}"#FileId(0):3@82..86\0#,#FileId(0):3@86..87\0# 0#FileId(0):3@88..89\0#)#FileId(0):3@89..90\0#,#FileId(0):3@90..91\0# foo#FileId(0):3@92..95\0#,#FileId(0):3@95..96\0# identity#FileId(0):3@97..105\0#!#FileId(0):3@105..106\0#(#FileId(0):3@106..107\0#10#FileId(0):3@107..109\0#)#FileId(0):3@109..110\0#,#FileId(0):3@110..111\0# "bar"#FileId(0):3@112..117\0#)#FileId(0):3@117..118\0# + #[rustc_builtin_macro] + #[macro_export] + macro_rules! concat {} + macro_rules! identity { + ($tt:tt) => { + $tt + } + } + + fn main(foo: ()) { + /* error: unresolved macro unresolved */"helloworld!"#FileId(0):3@207..323\6#; + } } "##]], diff --git a/crates/hir-expand/src/eager.rs b/crates/hir-expand/src/eager.rs index aa537af14393d..ef7200f615ccf 100644 --- a/crates/hir-expand/src/eager.rs +++ b/crates/hir-expand/src/eager.rs @@ -18,16 +18,15 @@ //! //! //! 
See the full discussion : -use base_db::{span::SyntaxContextId, CrateId, FileId}; -use rustc_hash::FxHashMap; -use syntax::{ted, Parse, SyntaxNode, TextRange, TextSize, WalkEvent}; +use base_db::{span::SyntaxContextId, CrateId}; +use syntax::{ted, Parse, SyntaxElement, SyntaxNode, TextSize, WalkEvent}; use triomphe::Arc; use crate::{ ast::{self, AstNode}, db::ExpandDatabase, mod_path::ModPath, - span::{RealSpanMap, SpanMapRef}, + span::SpanMapRef, EagerCallInfo, ExpandError, ExpandResult, ExpandTo, ExpansionSpanMap, InFile, MacroCallId, MacroCallKind, MacroCallLoc, MacroDefId, MacroDefKind, }; @@ -59,10 +58,14 @@ pub fn expand_eager_macro_input( let ExpandResult { value: (arg_exp, arg_exp_map), err: parse_err } = db.parse_macro_expansion(arg_id.as_macro_file()); + let mut arg_map = ExpansionSpanMap::empty(); + let ExpandResult { value: expanded_eager_input, err } = { eager_macro_recur( db, &arg_exp_map, + &mut arg_map, + TextSize::new(0), InFile::new(arg_id.as_file(), arg_exp.syntax_node()), krate, call_site, @@ -70,14 +73,15 @@ pub fn expand_eager_macro_input( ) }; let err = parse_err.or(err); + if cfg!(debug) { + arg_map.finish(); + } let Some((expanded_eager_input, _mapping)) = expanded_eager_input else { return ExpandResult { value: None, err }; }; - // FIXME: Spans! - let mut subtree = - mbe::syntax_node_to_token_tree(&expanded_eager_input, RealSpanMap::absolute(FileId::BOGUS)); + let mut subtree = mbe::syntax_node_to_token_tree(&expanded_eager_input, arg_map); subtree.delimiter = crate::tt::Delimiter::DUMMY_INVISIBLE; @@ -103,13 +107,7 @@ fn lazy_expand( let expand_to = ExpandTo::from_call_site(¯o_call.value); let ast_id = macro_call.with_value(ast_id); - let id = def.as_lazy_macro( - db, - krate, - MacroCallKind::FnLike { ast_id, expand_to }, - // FIXME: This is wrong - call_site, - ); + let id = def.as_lazy_macro(db, krate, MacroCallKind::FnLike { ast_id, expand_to }, call_site); let macro_file = id.as_macro_file(); db.parse_macro_expansion(macro_file) @@ -119,46 +117,42 @@ fn lazy_expand( fn eager_macro_recur( db: &dyn ExpandDatabase, span_map: &ExpansionSpanMap, + expanded_map: &mut ExpansionSpanMap, + mut offset: TextSize, curr: InFile, krate: CrateId, call_site: SyntaxContextId, macro_resolver: &dyn Fn(ModPath) -> Option, -) -> ExpandResult)>> { +) -> ExpandResult> { let original = curr.value.clone_for_update(); - let mut mapping = FxHashMap::default(); let mut replacements = Vec::new(); // FIXME: We only report a single error inside of eager expansions let mut error = None; - let mut offset = 0i32; - let apply_offset = |it: TextSize, offset: i32| { - TextSize::from(u32::try_from(offset + u32::from(it) as i32).unwrap_or_default()) - }; let mut children = original.preorder_with_tokens(); // Collect replacement while let Some(child) = children.next() { - let WalkEvent::Enter(child) = child else { continue }; let call = match child { - syntax::NodeOrToken::Node(node) => match ast::MacroCall::cast(node) { + WalkEvent::Enter(SyntaxElement::Node(child)) => match ast::MacroCall::cast(child) { Some(it) => { children.skip_subtree(); it } - None => continue, + _ => continue, }, - syntax::NodeOrToken::Token(t) => { - mapping.insert( - TextRange::new( - apply_offset(t.text_range().start(), offset), - apply_offset(t.text_range().end(), offset), - ), - t.text_range(), - ); + WalkEvent::Enter(_) => continue, + WalkEvent::Leave(child) => { + if let SyntaxElement::Token(t) = child { + let start = t.text_range().start(); + offset += t.text_range().len(); + expanded_map.push(offset, 
span_map.span_at(start)); + } continue; } }; + let def = match call .path() .and_then(|path| ModPath::from_src(db, path, SpanMapRef::ExpansionSpanMap(span_map))) @@ -168,11 +162,13 @@ fn eager_macro_recur( None => { error = Some(ExpandError::other(format!("unresolved macro {}", path.display(db)))); + offset += call.syntax().text_range().len(); continue; } }, None => { error = Some(ExpandError::other("malformed macro invocation")); + offset += call.syntax().text_range().len(); continue; } }; @@ -183,31 +179,22 @@ fn eager_macro_recur( krate, curr.with_value(call.clone()), def, - // FIXME: This call site is not quite right I think? We probably need to mark it? call_site, macro_resolver, ); match value { Some(call_id) => { - let ExpandResult { value, err: err2 } = + let ExpandResult { value: (parse, map), err: err2 } = db.parse_macro_expansion(call_id.as_macro_file()); - // if let Some(tt) = call.token_tree() { - // let call_tt_start = tt.syntax().text_range().start(); - // let call_start = - // apply_offset(call.syntax().text_range().start(), offset); - // if let Some((_, arg_map, _)) = db.macro_arg(call_id).value.as_deref() { - // mapping.extend(arg_map.entries().filter_map(|(tid, range)| { - // value - // .1 - // .first_range_by_token(tid, syntax::SyntaxKind::TOMBSTONE) - // .map(|r| (r + call_start, range + call_tt_start)) - // })); - // } - // } + map.iter().for_each(|(o, span)| expanded_map.push(o + offset, span)); + let syntax_node = parse.syntax_node(); ExpandResult { - value: Some(value.0.syntax_node().clone_for_update()), + value: Some(( + syntax_node.clone_for_update(), + offset + syntax_node.text_range().len(), + )), err: err.or(err2), } } @@ -226,6 +213,8 @@ fn eager_macro_recur( let ExpandResult { value, err: error } = eager_macro_recur( db, &tm, + expanded_map, + offset, // FIXME: We discard parse errors here parse.as_ref().map(|it| it.syntax_node()), krate, @@ -234,31 +223,7 @@ fn eager_macro_recur( ); let err = err.or(error); - // if let Some(tt) = call.token_tree() { - // let decl_mac = if let MacroDefKind::Declarative(ast_id) = def.kind { - // Some(db.decl_macro_expander(def.krate, ast_id)) - // } else { - // None - // }; - // let call_tt_start = tt.syntax().text_range().start(); - // let call_start = apply_offset(call.syntax().text_range().start(), offset); - // if let Some((_tt, arg_map, _)) = parse - // .file_id - // .macro_file() - // .and_then(|id| db.macro_arg(id.macro_call_id).value) - // .as_deref() - // { - // mapping.extend(arg_map.entries().filter_map(|(tid, range)| { - // tm.first_range_by_token( - // decl_mac.as_ref().map(|it| it.map_id_down(tid)).unwrap_or(tid), - // syntax::SyntaxKind::TOMBSTONE, - // ) - // .map(|r| (r + call_start, range + call_tt_start)) - // })); - // } - // } - // FIXME: Do we need to re-use _m here? 
- ExpandResult { value: value.map(|(n, _m)| n), err } + ExpandResult { value, err } } }; if err.is_some() { @@ -266,16 +231,18 @@ fn eager_macro_recur( } // check if the whole original syntax is replaced if call.syntax() == &original { - return ExpandResult { value: value.zip(Some(mapping)), err: error }; + return ExpandResult { value, err: error }; } - if let Some(insert) = value { - offset += u32::from(insert.text_range().len()) as i32 - - u32::from(call.syntax().text_range().len()) as i32; - replacements.push((call, insert)); + match value { + Some((insert, new_offset)) => { + replacements.push((call, insert)); + offset = new_offset; + } + None => offset += call.syntax().text_range().len(), } } replacements.into_iter().rev().for_each(|(old, new)| ted::replace(old.syntax(), new)); - ExpandResult { value: Some((original, mapping)), err: error } + ExpandResult { value: Some((original, offset)), err: error } } diff --git a/crates/hir-ty/src/tests.rs b/crates/hir-ty/src/tests.rs index 1446e83fa8876..9f6c237583e67 100644 --- a/crates/hir-ty/src/tests.rs +++ b/crates/hir-ty/src/tests.rs @@ -128,6 +128,7 @@ fn check_impl(ra_fixture: &str, allow_none: bool, only_types: bool, display_sour None => continue, }; let def_map = module.def_map(&db); + dbg!(def_map.dump(&db)); visit_module(&db, &def_map, module.local_id, &mut |it| defs.push(it)); } defs.sort_by_key(|def| match def { diff --git a/crates/hir-ty/src/tests/macros.rs b/crates/hir-ty/src/tests/macros.rs index 1e10a6fecafb6..8091017529670 100644 --- a/crates/hir-ty/src/tests/macros.rs +++ b/crates/hir-ty/src/tests/macros.rs @@ -787,6 +787,7 @@ fn main() { } #[test] +#[should_panic] // FIXME fn infer_builtin_macros_include_child_mod() { check_types( r#" diff --git a/crates/mbe/src/token_map.rs b/crates/mbe/src/token_map.rs index c2ec30ca72fb0..5871c0f1251cb 100644 --- a/crates/mbe/src/token_map.rs +++ b/crates/mbe/src/token_map.rs @@ -17,16 +17,16 @@ pub struct TokenMap { } impl TokenMap { - pub(crate) fn empty() -> Self { + pub fn empty() -> Self { Self { spans: Vec::new() } } - pub(crate) fn finish(&mut self) { + pub fn finish(&mut self) { assert!(self.spans.iter().tuple_windows().all(|(a, b)| a.0 < b.0)); self.spans.shrink_to_fit(); } - pub(crate) fn push(&mut self, offset: TextSize, span: S) { + pub fn push(&mut self, offset: TextSize, span: S) { self.spans.push((offset, span)); } @@ -54,4 +54,8 @@ impl TokenMap { let end_entry = self.spans[start_entry..].partition_point(|&(it, _)| it <= end); // FIXME: this might be wrong? (&self.spans[start_entry..][..end_entry]).iter().map(|&(_, s)| s) } + + pub fn iter(&self) -> impl Iterator + '_ { + self.spans.iter().copied() + } } From 4375419b24eecc6892910557e6fbd848c27f79e7 Mon Sep 17 00:00:00 2001 From: Nicholas Nethercote Date: Thu, 30 Nov 2023 15:01:11 +1100 Subject: [PATCH 21/80] Rename `HandlerInner::delay_span_bug` as `HandlerInner::span_delayed_bug`. Because the corresponding `Level` is `DelayedBug` and `span_delayed_bug` follows the pattern used everywhere else: `span_err`, `span_warning`, etc. 
--- crates/hir-def/src/attr/builtin.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/hir-def/src/attr/builtin.rs b/crates/hir-def/src/attr/builtin.rs index 2ae3cd2a939d5..15dceeb8af254 100644 --- a/crates/hir-def/src/attr/builtin.rs +++ b/crates/hir-def/src/attr/builtin.rs @@ -629,7 +629,7 @@ pub const INERT_ATTRIBUTES: &[BuiltinAttribute] = &[ rustc_attr!(TEST, rustc_regions, Normal, template!(Word), WarnFollowing), rustc_attr!( TEST, rustc_error, Normal, - template!(Word, List: "delay_span_bug_from_inside_query"), WarnFollowingWordOnly + template!(Word, List: "span_delayed_bug_from_inside_query"), WarnFollowingWordOnly ), rustc_attr!(TEST, rustc_dump_user_substs, Normal, template!(Word), WarnFollowing), rustc_attr!(TEST, rustc_evaluate_where_clauses, Normal, template!(Word), WarnFollowing), From 40da288eb040ac44e6de6d670ff8155b69706080 Mon Sep 17 00:00:00 2001 From: Nicholas Nethercote Date: Thu, 30 Nov 2023 16:14:38 +1100 Subject: [PATCH 22/80] Rename `LayoutCalculator::delay_bug` as `LayoutCalculator::delayed_bug`. To match with the previous commits. --- crates/hir-ty/src/layout.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/hir-ty/src/layout.rs b/crates/hir-ty/src/layout.rs index b2591f016d429..27c7949986878 100644 --- a/crates/hir-ty/src/layout.rs +++ b/crates/hir-ty/src/layout.rs @@ -110,7 +110,7 @@ struct LayoutCx<'a> { impl<'a> LayoutCalculator for LayoutCx<'a> { type TargetDataLayoutRef = &'a TargetDataLayout; - fn delay_bug(&self, txt: String) { + fn delayed_bug(&self, txt: String) { never!("{}", txt); } From d2a31acda19b5a94e1b5894f15ef0922bc286ee1 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Sat, 2 Dec 2023 13:03:46 +0100 Subject: [PATCH 23/80] Fix macro expansion expression parenthesis wrapping --- crates/base-db/src/span.rs | 23 +++++- crates/hir-def/src/body/lower.rs | 2 +- crates/hir-def/src/body/tests.rs | 69 ++++++++++++++++- crates/hir-def/src/data.rs | 2 +- crates/hir-def/src/expander.rs | 8 +- crates/hir-def/src/generics.rs | 2 +- .../hir-def/src/macro_expansion_tests/mbe.rs | 10 +-- .../macro_expansion_tests/mbe/metavar_expr.rs | 4 +- .../macro_expansion_tests/mbe/regression.rs | 4 +- .../mbe/tt_conversion.rs | 6 +- crates/hir-expand/src/db.rs | 74 +++++++++++++------ crates/hir-expand/src/hygiene.rs | 32 +++++++- crates/hir-expand/src/lib.rs | 9 +++ crates/hir-ty/src/infer/path.rs | 1 + crates/hir-ty/src/lower.rs | 6 +- crates/hir-ty/src/tests.rs | 1 - crates/mbe/src/expander/matcher.rs | 18 ++++- crates/mbe/src/expander/transcriber.rs | 11 +-- 18 files changed, 218 insertions(+), 64 deletions(-) diff --git a/crates/base-db/src/span.rs b/crates/base-db/src/span.rs index dbccbb382a7e8..6723bf97fea58 100644 --- a/crates/base-db/src/span.rs +++ b/crates/base-db/src/span.rs @@ -14,8 +14,25 @@ pub const ROOT_ERASED_FILE_AST_ID: ErasedFileAstId = pub type SpanData = tt::SpanData; -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] pub struct SyntaxContextId(InternId); + +impl fmt::Debug for SyntaxContextId { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + if *self == Self::SELF_REF { + f.debug_tuple("SyntaxContextId") + .field(&{ + #[derive(Debug)] + #[allow(non_camel_case_types)] + struct SELF_REF; + SELF_REF + }) + .finish() + } else { + f.debug_tuple("SyntaxContextId").field(&self.0).finish() + } + } +} crate::impl_intern_key!(SyntaxContextId); impl fmt::Display for SyntaxContextId { @@ -30,7 +47,7 @@ impl SyntaxContext for 
SyntaxContextId { // inherent trait impls please tyvm impl SyntaxContextId { pub const ROOT: Self = SyntaxContextId(unsafe { InternId::new_unchecked(0) }); - // veykril(HACK): salsa doesn't allow us fetching the id of the current input to be allocated so + // veykril(HACK): FIXME salsa doesn't allow us fetching the id of the current input to be allocated so // we need a special value that behaves as the current context. pub const SELF_REF: Self = SyntaxContextId(unsafe { InternId::new_unchecked(InternId::MAX - 1) }); @@ -107,7 +124,7 @@ pub struct MacroFileId { /// `MacroCallId` identifies a particular macro invocation, like /// `println!("Hello, {}", world)`. -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] pub struct MacroCallId(salsa::InternId); crate::impl_intern_key!(MacroCallId); diff --git a/crates/hir-def/src/body/lower.rs b/crates/hir-def/src/body/lower.rs index e4158d7564bdb..0466068ec810b 100644 --- a/crates/hir-def/src/body/lower.rs +++ b/crates/hir-def/src/body/lower.rs @@ -1025,7 +1025,7 @@ impl ExprCollector<'_> { let id = collector(self, Some(expansion.tree())); self.ast_id_map = prev_ast_id_map; - self.expander.exit(self.db, mark); + self.expander.exit(mark); id } None => collector(self, None), diff --git a/crates/hir-def/src/body/tests.rs b/crates/hir-def/src/body/tests.rs index 1658757d2b6ed..048f0a13f0e71 100644 --- a/crates/hir-def/src/body/tests.rs +++ b/crates/hir-def/src/body/tests.rs @@ -2,6 +2,7 @@ mod block; use base_db::{fixture::WithFixture, SourceDatabase}; use expect_test::{expect, Expect}; +use hir_expand::db::ExpandDatabase; use crate::{test_db::TestDB, ModuleDefId}; @@ -143,7 +144,6 @@ mod m { #[test] fn desugar_builtin_format_args() { - // Regression test for a path resolution bug introduced with inner item handling. let (db, body, def) = lower( r#" //- minicore: fmt @@ -221,3 +221,70 @@ fn main() { }"#]] .assert_eq(&body.pretty_print(&db, def)) } + +#[test] +fn test_macro_hygiene() { + let (db, body, def) = lower( + r##" +//- minicore: fmt, from +//- /main.rs +mod error; + +use crate::error::error; + +fn main() { + // _ = forces body expansion instead of block def map expansion + _ = error!("Failed to resolve path `{}`", node.text()); +} +//- /error.rs +macro_rules! _error { + ($fmt:expr, $($arg:tt)+) => {$crate::error::intermediate!(format_args!($fmt, $($arg)+))} +} +pub(crate) use _error as error; +macro_rules! 
_intermediate { + ($arg:expr) => {$crate::error::SsrError::new($arg)} +} +pub(crate) use _intermediate as intermediate; + +pub struct SsrError(pub(crate) core::fmt::Arguments); + +impl SsrError { + pub(crate) fn new(message: impl Into) -> SsrError { + SsrError(message.into()) + } +} +"##, + ); + println!("{}", db.dump_syntax_contexts()); + + assert_eq!(db.body_with_source_map(def.into()).1.diagnostics(), &[]); + expect![[r#" + fn main() { + _ = $crate::error::SsrError::new( + builtin#lang(Arguments::new_v1_formatted)( + &[ + "\"Failed to resolve path `", "`\"", + ], + &[ + builtin#lang(Argument::new_display)( + &node.text(), + ), + ], + &[ + builtin#lang(Placeholder::new)( + 0usize, + ' ', + builtin#lang(Alignment::Unknown), + 0u32, + builtin#lang(Count::Implied), + builtin#lang(Count::Implied), + ), + ], + unsafe { + builtin#lang(UnsafeArg::new)() + }, + ), + ); + }"#]] + .assert_eq(&body.pretty_print(&db, def)) +} diff --git a/crates/hir-def/src/data.rs b/crates/hir-def/src/data.rs index 2af4622a07243..635d13f24ad81 100644 --- a/crates/hir-def/src/data.rs +++ b/crates/hir-def/src/data.rs @@ -794,7 +794,7 @@ impl<'a> AssocItemCollector<'a> { self.collect(&item_tree, tree_id, &iter); - self.expander.exit(self.db, mark); + self.expander.exit(mark); } } diff --git a/crates/hir-def/src/expander.rs b/crates/hir-def/src/expander.rs index e36aa19b8ddc3..398f116d83135 100644 --- a/crates/hir-def/src/expander.rs +++ b/crates/hir-def/src/expander.rs @@ -94,8 +94,8 @@ impl Expander { ExpandResult { value: Some(InFile::new(macro_file.into(), value.0)), err: error.or(err) } } - pub fn exit(&mut self, db: &dyn DefDatabase, mut mark: Mark) { - self.span_map = db.span_map(mark.file_id); + pub fn exit(&mut self, mut mark: Mark) { + self.span_map = mark.span_map; self.current_file_id = mark.file_id; if self.recursion_depth == u32::MAX { // Recursion limit has been reached somewhere in the macro expansion tree. Reset the @@ -174,10 +174,11 @@ impl Expander { let parse = value.cast::()?; self.recursion_depth += 1; - self.span_map = db.span_map(file_id); + let old_span_map = std::mem::replace(&mut self.span_map, db.span_map(file_id)); let old_file_id = std::mem::replace(&mut self.current_file_id, file_id); let mark = Mark { file_id: old_file_id, + span_map: old_span_map, bomb: DropBomb::new("expansion mark dropped"), }; Some((mark, parse)) @@ -190,5 +191,6 @@ impl Expander { #[derive(Debug)] pub struct Mark { file_id: HirFileId, + span_map: SpanMap, bomb: DropBomb, } diff --git a/crates/hir-def/src/generics.rs b/crates/hir-def/src/generics.rs index 6d656bf9482f1..1c1c481a8eedc 100644 --- a/crates/hir-def/src/generics.rs +++ b/crates/hir-def/src/generics.rs @@ -439,7 +439,7 @@ impl GenericParams { let ctx = expander.ctx(db); let type_ref = TypeRef::from_ast(&ctx, expanded.tree()); self.fill_implicit_impl_trait_args(db, &mut *exp, &type_ref); - exp.1.exit(db, mark); + exp.1.exit(mark); } } }); diff --git a/crates/hir-def/src/macro_expansion_tests/mbe.rs b/crates/hir-def/src/macro_expansion_tests/mbe.rs index c4d44eab66c40..9bf2a50d57c96 100644 --- a/crates/hir-def/src/macro_expansion_tests/mbe.rs +++ b/crates/hir-def/src/macro_expansion_tests/mbe.rs @@ -997,9 +997,9 @@ macro_rules! vec { fn f() { { let mut v = Vec::new(); - v.push((1)); - v.push((2)); - v.push((3)); + v.push(1); + v.push(2); + v.push(3); v }; } @@ -1468,8 +1468,8 @@ macro_rules! 
matches { }; } fn main() { - match (0) { - 0|1 if (true )=>true , _=>false + match 0 { + 0|1 if true =>true , _=>false }; } "#]], diff --git a/crates/hir-def/src/macro_expansion_tests/mbe/metavar_expr.rs b/crates/hir-def/src/macro_expansion_tests/mbe/metavar_expr.rs index dd83e5c04d45b..967b5ad36babf 100644 --- a/crates/hir-def/src/macro_expansion_tests/mbe/metavar_expr.rs +++ b/crates/hir-def/src/macro_expansion_tests/mbe/metavar_expr.rs @@ -62,10 +62,10 @@ macro_rules !implement_methods { struct Foo; impl Foo { fn alpha() -> &'static[u32] { - &[(1), (2), (3)] + &[1, 2, 3] } fn beta() -> &'static[u32] { - &[(1), (2), (3)] + &[1, 2, 3] } } "#]], diff --git a/crates/hir-def/src/macro_expansion_tests/mbe/regression.rs b/crates/hir-def/src/macro_expansion_tests/mbe/regression.rs index 71dbb400b5487..2886b2a366c04 100644 --- a/crates/hir-def/src/macro_expansion_tests/mbe/regression.rs +++ b/crates/hir-def/src/macro_expansion_tests/mbe/regression.rs @@ -39,8 +39,8 @@ fn main() { }; { let mut v = Vec::new(); - v.push((1u32)); - v.push((2)); + v.push(1u32); + v.push(2); v }; } diff --git a/crates/hir-def/src/macro_expansion_tests/mbe/tt_conversion.rs b/crates/hir-def/src/macro_expansion_tests/mbe/tt_conversion.rs index b2ac7eb4094dd..ae56934f632f1 100644 --- a/crates/hir-def/src/macro_expansion_tests/mbe/tt_conversion.rs +++ b/crates/hir-def/src/macro_expansion_tests/mbe/tt_conversion.rs @@ -192,9 +192,9 @@ macro_rules! constant { ($e:expr ;) => {$e}; } -const _: () = (0.0); -const _: () = (0.); -const _: () = (0e0); +const _: () = 0.0; +const _: () = 0.; +const _: () = 0e0; "#]], ); } diff --git a/crates/hir-expand/src/db.rs b/crates/hir-expand/src/db.rs index 00755b16e9ffd..d2c6559b06b16 100644 --- a/crates/hir-expand/src/db.rs +++ b/crates/hir-expand/src/db.rs @@ -1,6 +1,10 @@ //! Defines database & queries for macro expansion. -use base_db::{salsa, span::SyntaxContextId, CrateId, Edition, FileId, SourceDatabase}; +use base_db::{ + salsa::{self, debug::DebugQueryTable}, + span::SyntaxContextId, + CrateId, Edition, FileId, SourceDatabase, +}; use either::Either; use limit::Limit; use mbe::{syntax_node_to_token_tree, ValueResult}; @@ -17,7 +21,7 @@ use crate::{ builtin_attr_macro::pseudo_derive_attr_expansion, builtin_fn_macro::EagerExpander, fixup::{self, SyntaxFixupUndoInfo}, - hygiene::{self, SyntaxContextData, Transparency}, + hygiene::{apply_mark, SyntaxContextData, Transparency}, span::{RealSpanMap, SpanMap, SpanMapRef}, tt, AstId, BuiltinAttrExpander, BuiltinDeriveExpander, BuiltinFnLikeExpander, EagerCallInfo, ExpandError, ExpandResult, ExpandTo, ExpansionSpanMap, HirFileId, HirFileIdRepr, MacroCallId, @@ -53,7 +57,7 @@ impl DeclarativeMacroExpander { ), None => self .mac - .expand(&tt, |s| s.ctx = db.apply_mark(s.ctx, call_id, self.transparency)) + .expand(&tt, |s| s.ctx = apply_mark(db, s.ctx, call_id, self.transparency)) .map_err(Into::into), } } @@ -115,16 +119,11 @@ pub trait ExpandDatabase: SourceDatabase { fn intern_macro_call(&self, macro_call: MacroCallLoc) -> MacroCallId; #[salsa::interned] fn intern_syntax_context(&self, ctx: SyntaxContextData) -> SyntaxContextId; + #[salsa::transparent] fn setup_syntax_context_root(&self) -> (); #[salsa::transparent] - #[salsa::invoke(hygiene::apply_mark)] - fn apply_mark( - &self, - ctxt: SyntaxContextId, - call_id: MacroCallId, - transparency: hygiene::Transparency, - ) -> SyntaxContextId; + fn dump_syntax_contexts(&self) -> String; /// Lowers syntactic macro call to a token tree representation. 
That's a firewall /// query, only typing in the macro call itself changes the returned @@ -269,7 +268,8 @@ pub fn expand_speculative( MacroDefKind::BuiltInAttr(it, _) => it.expand(db, actual_macro_call, &tt), }; - let expand_to = macro_expand_to(db, actual_macro_call); + let expand_to = loc.expand_to(); + fixup::reverse_fixups(&mut speculative_expansion.value, &undo_info); let (node, rev_tmap) = token_tree_to_syntax_node(&speculative_expansion.value, expand_to); @@ -318,12 +318,9 @@ fn parse_macro_expansion( macro_file: MacroFileId, ) -> ExpandResult<(Parse, Arc)> { let _p = profile::span("parse_macro_expansion"); - let mbe::ValueResult { value: tt, err } = macro_expand(db, macro_file.macro_call_id); - - let expand_to = macro_expand_to(db, macro_file.macro_call_id); - - tracing::debug!("expanded = {}", tt.as_debug_string()); - tracing::debug!("kind = {:?}", expand_to); + let loc = db.lookup_intern_macro_call(macro_file.macro_call_id); + let expand_to = loc.expand_to(); + let mbe::ValueResult { value: tt, err } = macro_expand(db, macro_file.macro_call_id, loc); let (parse, rev_token_map) = token_tree_to_syntax_node(&tt, expand_to); @@ -575,9 +572,9 @@ fn macro_expander(db: &dyn ExpandDatabase, id: MacroDefId) -> TokenExpander { fn macro_expand( db: &dyn ExpandDatabase, macro_call_id: MacroCallId, + loc: MacroCallLoc, ) -> ExpandResult> { let _p = profile::span("macro_expand"); - let loc = db.lookup_intern_macro_call(macro_call_id); let ExpandResult { value: tt, mut err } = match loc.def.kind { MacroDefKind::ProcMacro(..) => return db.expand_proc_macro(macro_call_id), @@ -711,10 +708,6 @@ fn expand_proc_macro(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult ExpandTo { - db.lookup_intern_macro_call(id).expand_to() -} - fn token_tree_to_syntax_node( tt: &tt::Subtree, expand_to: ExpandTo, @@ -751,3 +744,40 @@ fn check_tt_count(tt: &tt::Subtree) -> Result<(), ExpandResult> fn setup_syntax_context_root(db: &dyn ExpandDatabase) { db.intern_syntax_context(SyntaxContextData::root()); } + +fn dump_syntax_contexts(db: &dyn ExpandDatabase) -> String { + let mut s = String::from("Expansions:"); + let mut entries = InternMacroCallLookupQuery.in_db(db).entries::>(); + entries.sort_by_key(|e| e.key); + for e in entries { + let id = e.key; + let expn_data = e.value.as_ref().unwrap(); + s.push_str(&format!( + "\n{:?}: parent: {:?}, call_site_ctxt: {:?}, def_site_ctxt: {:?}, kind: {:?}", + id, + expn_data.kind.file_id(), + expn_data.call_site, + SyntaxContextId::ROOT, // FIXME expn_data.def_site, + expn_data.kind.descr(), + )); + } + + s.push_str("\n\nSyntaxContexts:\n"); + let mut entries = InternSyntaxContextLookupQuery.in_db(db).entries::>(); + entries.sort_by_key(|e| e.key); + for e in entries { + struct SyntaxContextDebug<'a>( + &'a dyn ExpandDatabase, + SyntaxContextId, + &'a SyntaxContextData, + ); + + impl<'a> std::fmt::Debug for SyntaxContextDebug<'a> { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + self.2.fancy_debug(self.1, self.0, f) + } + } + stdx::format_to!(s, "{:?}\n", SyntaxContextDebug(db, e.key, &e.value.unwrap())); + } + s +} diff --git a/crates/hir-expand/src/hygiene.rs b/crates/hir-expand/src/hygiene.rs index 8fdfa8af0cac0..a809e92d62221 100644 --- a/crates/hir-expand/src/hygiene.rs +++ b/crates/hir-expand/src/hygiene.rs @@ -8,7 +8,7 @@ use base_db::span::{MacroCallId, SpanData, SyntaxContextId}; use crate::db::ExpandDatabase; -#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] +#[derive(Copy, Clone, Hash, PartialEq, Eq)] pub struct SyntaxContextData { 
pub outer_expn: Option, pub outer_transparency: Transparency, @@ -19,6 +19,18 @@ pub struct SyntaxContextData { pub opaque_and_semitransparent: SyntaxContextId, } +impl std::fmt::Debug for SyntaxContextData { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.debug_struct("SyntaxContextData") + .field("outer_expn", &self.outer_expn) + .field("outer_transparency", &self.outer_transparency) + .field("parent", &self.parent) + .field("opaque", &self.opaque) + .field("opaque_and_semitransparent", &self.opaque_and_semitransparent) + .finish() + } +} + impl SyntaxContextData { pub fn root() -> Self { SyntaxContextData { @@ -29,6 +41,22 @@ impl SyntaxContextData { opaque_and_semitransparent: SyntaxContextId::ROOT, } } + + pub fn fancy_debug( + self, + self_id: SyntaxContextId, + db: &dyn ExpandDatabase, + f: &mut std::fmt::Formatter<'_>, + ) -> std::fmt::Result { + write!(f, "#{self_id} parent: #{}, outer_mark: (", self.parent)?; + match self.outer_expn { + Some(id) => { + write!(f, "{:?}::{{{{expn{:?}}}}}", db.lookup_intern_macro_call(id).krate, id)? + } + None => write!(f, "root")?, + } + write!(f, ", {:?})", self.outer_transparency) + } } /// A property of a macro expansion that determines how identifiers @@ -80,7 +108,7 @@ fn span_with_ctxt_from_mark( expn_id: MacroCallId, transparency: Transparency, ) -> SpanData { - SpanData { ctx: db.apply_mark(SyntaxContextId::ROOT, expn_id, transparency), ..span } + SpanData { ctx: apply_mark(db, SyntaxContextId::ROOT, expn_id, transparency), ..span } } pub(super) fn apply_mark( diff --git a/crates/hir-expand/src/lib.rs b/crates/hir-expand/src/lib.rs index b5f5fdd22ee74..d743f2adae5fd 100644 --- a/crates/hir-expand/src/lib.rs +++ b/crates/hir-expand/src/lib.rs @@ -122,6 +122,7 @@ pub struct MacroDefId { pub kind: MacroDefKind, pub local_inner: bool, pub allow_internal_unsafe: bool, + // pub def_site: SyntaxContextId, } #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] @@ -463,6 +464,14 @@ impl MacroCallLoc { } impl MacroCallKind { + fn descr(&self) -> &'static str { + match self { + MacroCallKind::FnLike { .. } => "macro call", + MacroCallKind::Derive { .. } => "derive macro", + MacroCallKind::Attr { .. } => "attribute macro", + } + } + /// Returns the file containing the macro invocation. fn file_id(&self) -> HirFileId { match *self { diff --git a/crates/hir-ty/src/infer/path.rs b/crates/hir-ty/src/infer/path.rs index c6bbf2f614071..fcfe1a3b5cf45 100644 --- a/crates/hir-ty/src/infer/path.rs +++ b/crates/hir-ty/src/infer/path.rs @@ -390,6 +390,7 @@ impl InferenceContext<'_> { } } +#[derive(Debug)] enum ValuePathResolution { // It's awkward to wrap a single ID in two enums, but we need both and this saves fallible // conversion between them + `unwrap()`. 
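Both `dump_syntax_contexts` and `SyntaxContextData::fancy_debug` above lean on the same trick: an interned id is only an index, so printing it usefully means smuggling the lookup context into `fmt::Debug` through a throwaway adapter struct (`SyntaxContextDebug` in the db.rs hunk). Here is a minimal sketch of that pattern, with a plain `Vec`-backed `Interner` as a stand-in for the salsa database; none of these names are rust-analyzer's real types.

    use std::fmt;

    // Stand-in for the interner/database: ids are indices into stored data.
    struct Interner {
        names: Vec<String>,
    }

    #[derive(Clone, Copy)]
    struct Id(usize);

    // Throwaway adapter that carries the lookup context needed for printing.
    struct IdDebug<'a>(&'a Interner, Id);

    impl fmt::Debug for IdDebug<'_> {
        fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
            let Id(idx) = self.1;
            match self.0.names.get(idx) {
                Some(name) => write!(f, "#{idx} ({name})"),
                None => write!(f, "#{idx} (unknown)"),
            }
        }
    }

    fn main() {
        let interner = Interner { names: vec!["ROOT".into(), "expn1".into()] };
        println!("{:?}", IdDebug(&interner, Id(1))); // prints: #1 (expn1)
    }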
diff --git a/crates/hir-ty/src/lower.rs b/crates/hir-ty/src/lower.rs index 9f5b59b239a4b..5122021d6d2bd 100644 --- a/crates/hir-ty/src/lower.rs +++ b/crates/hir-ty/src/lower.rs @@ -407,11 +407,7 @@ impl<'a> TyLoweringContext<'a> { drop(expander); let ty = self.lower_ty(&type_ref); - self.expander - .borrow_mut() - .as_mut() - .unwrap() - .exit(self.db.upcast(), mark); + self.expander.borrow_mut().as_mut().unwrap().exit(mark); Some(ty) } _ => { diff --git a/crates/hir-ty/src/tests.rs b/crates/hir-ty/src/tests.rs index 9f6c237583e67..1446e83fa8876 100644 --- a/crates/hir-ty/src/tests.rs +++ b/crates/hir-ty/src/tests.rs @@ -128,7 +128,6 @@ fn check_impl(ra_fixture: &str, allow_none: bool, only_types: bool, display_sour None => continue, }; let def_map = module.def_map(&db); - dbg!(def_map.dump(&db)); visit_module(&db, &def_map, module.local_id, &mut |it| defs.push(it)); } defs.sort_by_key(|def| match def { diff --git a/crates/mbe/src/expander/matcher.rs b/crates/mbe/src/expander/matcher.rs index 5e1ceacf12c36..012b02a3f87ab 100644 --- a/crates/mbe/src/expander/matcher.rs +++ b/crates/mbe/src/expander/matcher.rs @@ -792,9 +792,21 @@ fn match_meta_var( } _ => {} }; - return input - .expect_fragment(parser::PrefixEntryPoint::Expr) - .map(|tt| tt.map(tt::TokenTree::subtree_or_wrap).map(Fragment::Expr)); + return input.expect_fragment(parser::PrefixEntryPoint::Expr).map(|tt| { + tt.map(|tt| match tt { + tt::TokenTree::Leaf(leaf) => tt::Subtree { + delimiter: tt::Delimiter::dummy_invisible(), + token_trees: vec![leaf.into()], + }, + tt::TokenTree::Subtree(mut s) => { + if s.delimiter.kind == tt::DelimiterKind::Invisible { + s.delimiter.kind = tt::DelimiterKind::Parenthesis; + } + s + } + }) + .map(Fragment::Expr) + }); } MetaVarKind::Ident | MetaVarKind::Tt | MetaVarKind::Lifetime | MetaVarKind::Literal => { let tt_result = match kind { diff --git a/crates/mbe/src/expander/transcriber.rs b/crates/mbe/src/expander/transcriber.rs index 40f683d846268..e4a46c16597fc 100644 --- a/crates/mbe/src/expander/transcriber.rs +++ b/crates/mbe/src/expander/transcriber.rs @@ -444,15 +444,8 @@ fn expand_repeat( fn push_fragment(buf: &mut Vec>, fragment: Fragment) { match fragment { Fragment::Tokens(tt::TokenTree::Subtree(tt)) => push_subtree(buf, tt), - Fragment::Expr(mut tt) => { - if tt.delimiter.kind == tt::DelimiterKind::Invisible { - tt.delimiter = tt::Delimiter { - open: S::DUMMY, - close: S::DUMMY, - kind: tt::DelimiterKind::Parenthesis, - }; - } - buf.push(tt.into()) + Fragment::Expr(sub) => { + push_subtree(buf, sub); } Fragment::Path(tt) => fix_up_and_push_path_tt(buf, tt), Fragment::Tokens(tt) => buf.push(tt), From 5edf7bddc675b8184a75f6ae386481f963958d44 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Sat, 2 Dec 2023 13:34:40 +0100 Subject: [PATCH 24/80] Fix mod item in included file resolving incorrectly --- crates/hir-def/src/nameres/mod_resolution.rs | 2 +- crates/hir-expand/src/builtin_fn_macro.rs | 25 +++++++--------- crates/hir-expand/src/lib.rs | 29 +++++++++++++++++-- crates/hir-ty/src/tests/macros.rs | 1 - .../src/handlers/unresolved_macro_call.rs | 16 ---------- .../src/integrated_benchmarks.rs | 2 +- 6 files changed, 39 insertions(+), 36 deletions(-) diff --git a/crates/hir-def/src/nameres/mod_resolution.rs b/crates/hir-def/src/nameres/mod_resolution.rs index 22802433aa58d..9dcb9717297fa 100644 --- a/crates/hir-def/src/nameres/mod_resolution.rs +++ b/crates/hir-def/src/nameres/mod_resolution.rs @@ -66,7 +66,7 @@ impl ModDir { attr_path: Option<&SmolStr>, ) -> Result<(FileId, bool, ModDir), 
Box<[String]>> { let name = name.unescaped(); - let orig_file_id = file_id.original_file(db.upcast()); + let orig_file_id = file_id.original_file_respecting_includes(db.upcast()); let mut candidate_files = ArrayVec::<_, 2>::new(); match attr_path { diff --git a/crates/hir-expand/src/builtin_fn_macro.rs b/crates/hir-expand/src/builtin_fn_macro.rs index 74ca2f7ec44e1..4b2f27bd46506 100644 --- a/crates/hir-expand/src/builtin_fn_macro.rs +++ b/crates/hir-expand/src/builtin_fn_macro.rs @@ -578,18 +578,12 @@ fn include_expand( tt: &tt::Subtree, span: SpanData, ) -> ExpandResult { - let path = match parse_string(tt) { + let file_id = match include_input_to_file_id(db, arg_id, tt) { Ok(it) => it, Err(e) => { return ExpandResult::new(tt::Subtree::empty(DelimSpan { open: span, close: span }), e) } }; - let file_id = match relative_file(db, arg_id, &path, false) { - Ok(file_id) => file_id, - Err(e) => { - return ExpandResult::new(tt::Subtree::empty(DelimSpan { open: span, close: span }), e); - } - }; match parse_to_token_tree( SpanAnchor { file_id, ast_id: ROOT_ERASED_FILE_AST_ID }, SyntaxContextId::ROOT, @@ -603,19 +597,20 @@ fn include_expand( } } +pub fn include_input_to_file_id( + db: &dyn ExpandDatabase, + arg_id: MacroCallId, + arg: &tt::Subtree, +) -> Result { + relative_file(db, arg_id, &parse_string(arg)?, false) +} + fn include_bytes_expand( _db: &dyn ExpandDatabase, _arg_id: MacroCallId, - tt: &tt::Subtree, + _tt: &tt::Subtree, span: SpanData, ) -> ExpandResult { - let _path = match parse_string(tt) { - Ok(it) => it, - Err(e) => { - return ExpandResult::new(tt::Subtree::empty(DelimSpan { open: span, close: span }), e) - } - }; - // FIXME: actually read the file here if the user asked for macro expansion let res = tt::Subtree { delimiter: tt::Delimiter::dummy_invisible(), diff --git a/crates/hir-expand/src/lib.rs b/crates/hir-expand/src/lib.rs index d743f2adae5fd..9caf5513bf84c 100644 --- a/crates/hir-expand/src/lib.rs +++ b/crates/hir-expand/src/lib.rs @@ -136,7 +136,7 @@ pub enum MacroDefKind { } #[derive(Debug, Clone, PartialEq, Eq, Hash)] -struct EagerCallInfo { +pub struct EagerCallInfo { /// The expanded argument of the eager macro. arg: Arc, /// Call id of the eager macro's input file (this is the macro file for its fully expanded input). @@ -176,6 +176,8 @@ pub trait HirFileIdExt { /// expansion originated from. fn original_file(self, db: &dyn db::ExpandDatabase) -> FileId; + fn original_file_respecting_includes(self, db: &dyn db::ExpandDatabase) -> FileId; + /// If this is a macro call, returns the syntax node of the call. fn call_node(self, db: &dyn db::ExpandDatabase) -> Option>; @@ -215,6 +217,29 @@ impl HirFileIdExt for HirFileId { } } + fn original_file_respecting_includes(mut self, db: &dyn db::ExpandDatabase) -> FileId { + loop { + match self.repr() { + base_db::span::HirFileIdRepr::FileId(id) => break id, + base_db::span::HirFileIdRepr::MacroFile(file) => { + let loc = db.lookup_intern_macro_call(file.macro_call_id); + if loc.def.is_include() { + if let Some(eager) = &loc.eager { + if let Ok(it) = builtin_fn_macro::include_input_to_file_id( + db, + file.macro_call_id, + &eager.arg, + ) { + break it; + } + } + } + self = loc.kind.file_id(); + } + } + } + } + fn call_node(self, db: &dyn db::ExpandDatabase) -> Option> { let macro_file = self.macro_file()?; let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id); @@ -473,7 +498,7 @@ impl MacroCallKind { } /// Returns the file containing the macro invocation. 
- fn file_id(&self) -> HirFileId { + pub fn file_id(&self) -> HirFileId { match *self { MacroCallKind::FnLike { ast_id: InFile { file_id, .. }, .. } | MacroCallKind::Derive { ast_id: InFile { file_id, .. }, .. } diff --git a/crates/hir-ty/src/tests/macros.rs b/crates/hir-ty/src/tests/macros.rs index 8091017529670..1e10a6fecafb6 100644 --- a/crates/hir-ty/src/tests/macros.rs +++ b/crates/hir-ty/src/tests/macros.rs @@ -787,7 +787,6 @@ fn main() { } #[test] -#[should_panic] // FIXME fn infer_builtin_macros_include_child_mod() { check_types( r#" diff --git a/crates/ide-diagnostics/src/handlers/unresolved_macro_call.rs b/crates/ide-diagnostics/src/handlers/unresolved_macro_call.rs index edcfa073a0fb6..33e7c2e37c3b0 100644 --- a/crates/ide-diagnostics/src/handlers/unresolved_macro_call.rs +++ b/crates/ide-diagnostics/src/handlers/unresolved_macro_call.rs @@ -67,22 +67,6 @@ macro_rules! m { () => {} } } self::m!(); self::m2!(); //^^ error: unresolved macro `self::m2!` -"#, - ); - } - - #[test] - #[should_panic] // FIXME: https://github.com/rust-lang/rust-analyzer/issues/14968 - fn include_does_not_break_diagnostics() { - check_diagnostics( - r#" -//- minicore: include -//- /lib.rs crate:lib -include!("include-me.rs"); -//- /include-me.rs -/// long doc that pushes the diagnostic range beyond the first file's text length -#[err] -mod prim_never {} "#, ); } diff --git a/crates/rust-analyzer/src/integrated_benchmarks.rs b/crates/rust-analyzer/src/integrated_benchmarks.rs index a865d9e4ab821..ed2cf07551b0c 100644 --- a/crates/rust-analyzer/src/integrated_benchmarks.rs +++ b/crates/rust-analyzer/src/integrated_benchmarks.rs @@ -30,7 +30,7 @@ fn integrated_highlighting_benchmark() { // Load rust-analyzer itself. let workspace_to_load = project_root(); - let file = "./crates/ide-db/src/apply_change.rs"; + let file = "./crates/rust-analyzer/src/config.rs"; let cargo_config = CargoConfig::default(); let load_cargo_config = LoadCargoConfig { From 02a3a9438a541ac15290a49c97c0c6ceffb23cd5 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Sat, 2 Dec 2023 16:50:21 +0100 Subject: [PATCH 25/80] Some more minor cleanups --- crates/base-db/src/lib.rs | 1 - crates/hir-def/src/body/tests.rs | 48 +++++++++++++++++++++++++++++-- crates/hir-expand/src/lib.rs | 5 ++-- crates/hir-ty/src/tests/macros.rs | 12 ++++---- crates/hir/src/semantics.rs | 7 +---- crates/ide/src/expand_macro.rs | 4 +-- crates/test-utils/src/minicore.rs | 9 +++++- 7 files changed, 65 insertions(+), 21 deletions(-) diff --git a/crates/base-db/src/lib.rs b/crates/base-db/src/lib.rs index 38a2641230b76..cddd5bdf48eec 100644 --- a/crates/base-db/src/lib.rs +++ b/crates/base-db/src/lib.rs @@ -4,7 +4,6 @@ mod input; mod change; -// FIXME: Is this purely a test util mod? Consider #[cfg(test)] gating it. 
pub mod fixture; pub mod span; diff --git a/crates/hir-def/src/body/tests.rs b/crates/hir-def/src/body/tests.rs index 048f0a13f0e71..5517abb1ab70f 100644 --- a/crates/hir-def/src/body/tests.rs +++ b/crates/hir-def/src/body/tests.rs @@ -2,7 +2,6 @@ mod block; use base_db::{fixture::WithFixture, SourceDatabase}; use expect_test::{expect, Expect}; -use hir_expand::db::ExpandDatabase; use crate::{test_db::TestDB, ModuleDefId}; @@ -255,7 +254,6 @@ impl SsrError { } "##, ); - println!("{}", db.dump_syntax_contexts()); assert_eq!(db.body_with_source_map(def.into()).1.diagnostics(), &[]); expect![[r#" @@ -288,3 +286,49 @@ impl SsrError { }"#]] .assert_eq(&body.pretty_print(&db, def)) } + +#[test] +fn regression_10300() { + let (db, body, def) = lower( + r#" +//- minicore: concat, panic +mod private { + pub use core::concat; +} + +macro_rules! m { + () => { + panic!(concat!($crate::private::concat!("cc"))); + }; +} + +fn f() { + m!(); +} +"#, + ); + + let (_, source_map) = db.body_with_source_map(def.into()); + assert_eq!(source_map.diagnostics(), &[]); + + for (_, def_map) in body.blocks(&db) { + assert_eq!(def_map.diagnostics(), &[]); + } + + expect![[r#" + fn f() { + $crate::panicking::panic_fmt( + builtin#lang(Arguments::new_v1_formatted)( + &[ + "\"cc\"", + ], + &[], + &[], + unsafe { + builtin#lang(UnsafeArg::new)() + }, + ), + ); + }"#]] + .assert_eq(&body.pretty_print(&db, def)) +} diff --git a/crates/hir-expand/src/lib.rs b/crates/hir-expand/src/lib.rs index 9caf5513bf84c..f328431372ec6 100644 --- a/crates/hir-expand/src/lib.rs +++ b/crates/hir-expand/src/lib.rs @@ -628,14 +628,13 @@ impl ExpansionInfo { span: SpanData, // FIXME: use this for range mapping, so that we can resolve inline format args _relative_token_offset: Option, - // FIXME: ret ty should be wrapped in InMacroFile - ) -> Option> + 'a> { + ) -> Option> + 'a> { let tokens = self .exp_map .ranges_with_span(span) .flat_map(move |range| self.expanded.value.covering_element(range).into_token()); - Some(tokens.map(move |token| InFile::new(self.expanded.file_id.into(), token))) + Some(tokens.map(move |token| InMacroFile::new(self.expanded.file_id, token))) } /// Maps up the text range out of the expansion hierarchy back into the original file its from. 
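Most expectation churn in this series, including the hir-ty hunks that follow, is mechanical `expect![[...]]` snapshot regeneration. For reference, a minimal sketch of how such a snapshot test behaves, assuming the `expect-test` crate as a dev-dependency; `render` is a made-up stand-in for the real pretty-printers.

    use expect_test::expect;

    fn render() -> String {
        "SUBTREE $$ 1 1\n  IDENT ident 1".to_owned()
    }

    #[test]
    fn snapshot() {
        let expected = expect![[r#"
            SUBTREE $$ 1 1
              IDENT ident 1"#]];
        // On mismatch this fails with a diff; rerunning with UPDATE_EXPECT=1
        // rewrites the literal in the source file, which is how the large
        // renumbering diffs in this series were produced.
        expected.assert_eq(&render());
    }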
diff --git a/crates/hir-ty/src/tests/macros.rs b/crates/hir-ty/src/tests/macros.rs index 1e10a6fecafb6..d16e0eb0137bf 100644 --- a/crates/hir-ty/src/tests/macros.rs +++ b/crates/hir-ty/src/tests/macros.rs @@ -63,10 +63,10 @@ fn infer_macros_expanded() { } "#, expect![[r#" - !0..21 '{Foo(v...2),])}': Foo + !0..17 '{Foo(v...,2,])}': Foo !1..4 'Foo': Foo({unknown}) -> Foo - !1..20 'Foo(ve...(2),])': Foo - !5..19 'vec![(1),(2),]': {unknown} + !1..16 'Foo(vec![1,2,])': Foo + !5..15 'vec![1,2,]': {unknown} 155..181 '{ ...,2); }': () 165..166 'x': Foo "#]], @@ -96,10 +96,10 @@ fn infer_legacy_textual_scoped_macros_expanded() { } "#, expect![[r#" - !0..21 '{Foo(v...2),])}': Foo + !0..17 '{Foo(v...,2,])}': Foo !1..4 'Foo': Foo({unknown}) -> Foo - !1..20 'Foo(ve...(2),])': Foo - !5..19 'vec![(1),(2),]': {unknown} + !1..16 'Foo(vec![1,2,])': Foo + !5..15 'vec![1,2,]': {unknown} 194..250 '{ ...,2); }': () 204..205 'x': Foo 227..228 'y': {unknown} diff --git a/crates/hir/src/semantics.rs b/crates/hir/src/semantics.rs index 38c4f081b7604..ed3d3f1a3b694 100644 --- a/crates/hir/src/semantics.rs +++ b/crates/hir/src/semantics.rs @@ -557,11 +557,6 @@ impl<'db> SemanticsImpl<'db> { .span_at(token.text_range().start()), }; - // fetch span information of token in real file, then use that look through expansions of - // calls the token is in and afterwards recursively with the same span. - // what about things where spans change? Due to being joined etc, that is we don't find the - // exact span anymore? - let def_map = sa.resolver.def_map(); let mut stack: SmallVec<[_; 4]> = smallvec![InFile::new(sa.file_id, token)]; @@ -580,7 +575,7 @@ impl<'db> SemanticsImpl<'db> { let len = stack.len(); // requeue the tokens we got from mapping our current token down - stack.extend(mapped_tokens); + stack.extend(mapped_tokens.map(Into::into)); // if the length changed we have found a mapping for the token (stack.len() != len).then_some(()) }; diff --git a/crates/ide/src/expand_macro.rs b/crates/ide/src/expand_macro.rs index cca3e68d50244..a70f335ada3cf 100644 --- a/crates/ide/src/expand_macro.rs +++ b/crates/ide/src/expand_macro.rs @@ -340,8 +340,8 @@ fn main() { expect![[r#" match_ast! { - if let Some(it) = ast::TraitDef::cast((container).clone()){} - else if let Some(it) = ast::ImplDef::cast((container).clone()){} + if let Some(it) = ast::TraitDef::cast(container.clone()){} + else if let Some(it) = ast::ImplDef::cast(container.clone()){} else { { continue diff --git a/crates/test-utils/src/minicore.rs b/crates/test-utils/src/minicore.rs index f2ca9d82ed0dd..8ebc9909b6b23 100644 --- a/crates/test-utils/src/minicore.rs +++ b/crates/test-utils/src/minicore.rs @@ -15,6 +15,7 @@ //! cell: copy, drop //! clone: sized //! coerce_unsized: unsize +//! concat: //! copy: clone //! default: sized //! deref_mut: deref @@ -1353,7 +1354,7 @@ mod panicking { mod macros { // region:panic #[macro_export] - #[rustc_builtin_macro(std_panic)] + #[rustc_builtin_macro(core_panic)] macro_rules! panic { ($($arg:tt)*) => { /* compiler built-in */ @@ -1406,6 +1407,12 @@ mod macros { ($file:expr $(,)?) => {{ /* compiler built-in */ }}; } // endregion:include + + // region:concat + #[rustc_builtin_macro] + #[macro_export] + macro_rules! 
concat {} + // endregion:concat } // region:non_zero From 81410ab50065db9c657cfbb3ef6333119a209d5d Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Sat, 2 Dec 2023 19:32:53 +0100 Subject: [PATCH 26/80] Cleanup FileId stuff --- crates/hir-expand/src/files.rs | 79 ++++++++++--------- crates/hir-expand/src/lib.rs | 13 +-- .../src/handlers/remove_unused_imports.rs | 4 +- crates/ide-db/src/search.rs | 7 +- crates/ide/src/navigation_target.rs | 9 ++- 5 files changed, 64 insertions(+), 48 deletions(-) diff --git a/crates/hir-expand/src/files.rs b/crates/hir-expand/src/files.rs index 36eb2dbb27c4f..ed8639d7a1d94 100644 --- a/crates/hir-expand/src/files.rs +++ b/crates/hir-expand/src/files.rs @@ -83,26 +83,41 @@ impl InFileWrapper> { // endregion:transpose impls -// region:specific impls +trait FileIdToSyntax: Copy { + fn file_syntax(self, db: &dyn db::ExpandDatabase) -> SyntaxNode; +} -impl InFile { - pub fn file_syntax(&self, db: &dyn db::ExpandDatabase) -> SyntaxNode { - db.parse_or_expand(self.file_id) +impl FileIdToSyntax for FileId { + fn file_syntax(self, db: &dyn db::ExpandDatabase) -> SyntaxNode { + db.parse(self).syntax_node() + } +} +impl FileIdToSyntax for MacroFileId { + fn file_syntax(self, db: &dyn db::ExpandDatabase) -> SyntaxNode { + db.parse_macro_expansion(self).value.0.syntax_node() + } +} +impl FileIdToSyntax for HirFileId { + fn file_syntax(self, db: &dyn db::ExpandDatabase) -> SyntaxNode { + db.parse_or_expand(self) } } -impl InRealFile { +#[allow(private_bounds)] +impl InFileWrapper { pub fn file_syntax(&self, db: &dyn db::ExpandDatabase) -> SyntaxNode { - db.parse(self.file_id).syntax_node() + FileIdToSyntax::file_syntax(self.file_id, db) } } -impl InMacroFile { - pub fn file_syntax(&self, db: &dyn db::ExpandDatabase) -> SyntaxNode { - db.parse_macro_expansion(self.file_id).value.0.syntax_node() +impl InFileWrapper { + pub fn syntax(&self) -> InFileWrapper { + self.with_value(self.value.syntax()) } } +// region:specific impls + impl InFile<&SyntaxNode> { pub fn ancestors_with_macros( self, @@ -241,9 +256,15 @@ impl InFile { match self.file_id.repr() { HirFileIdRepr::FileId(file_id) => FileRange { file_id, range: self.value.text_range() }, HirFileIdRepr::MacroFile(mac_file) => { - if let Some(res) = self.original_file_range_opt(db) { - return res; + let (range, ctxt) = ExpansionInfo::new(db, mac_file) + .map_token_range_up(db, self.value.text_range()); + + // FIXME: Figure out an API that makes proper use of ctx, this only exists to + // keep pre-token map rewrite behaviour. + if ctxt.is_root() { + return range; } + // Fall back to whole macro call. let loc = db.lookup_intern_macro_call(mac_file.macro_call_id); loc.kind.original_call_range(db) @@ -257,8 +278,9 @@ impl InFile { HirFileIdRepr::FileId(file_id) => { Some(FileRange { file_id, range: self.value.text_range() }) } - HirFileIdRepr::MacroFile(_) => { - let (range, ctxt) = ascend_range_up_macros(db, self.map(|it| it.text_range())); + HirFileIdRepr::MacroFile(mac_file) => { + let (range, ctxt) = ExpansionInfo::new(db, mac_file) + .map_token_range_up(db, self.value.text_range()); // FIXME: Figure out an API that makes proper use of ctx, this only exists to // keep pre-token map rewrite behaviour. @@ -275,16 +297,19 @@ impl InFile { impl InFile { /// Attempts to map the syntax node back up its macro calls. 
pub fn original_file_range(self, db: &dyn db::ExpandDatabase) -> FileRange { - let (range, _ctxt) = ascend_range_up_macros(db, self); + let (range, _ctxt) = match self.file_id.repr() { + HirFileIdRepr::FileId(file_id) => { + (FileRange { file_id, range: self.value }, SyntaxContextId::ROOT) + } + HirFileIdRepr::MacroFile(m) => { + ExpansionInfo::new(db, m).map_token_range_up(db, self.value) + } + }; range } } impl InFile { - pub fn descendants(self) -> impl Iterator> { - self.value.syntax().descendants().filter_map(T::cast).map(move |n| self.with_value(n)) - } - pub fn original_ast_node(self, db: &dyn db::ExpandDatabase) -> Option> { // This kind of upmapping can only be achieved in attribute expanded files, // as we don't have node inputs otherwise and therefore can't find an `N` node in the input @@ -312,22 +337,4 @@ impl InFile { let value = anc.ancestors().find_map(N::cast)?; Some(InRealFile::new(file_id, value)) } - - pub fn syntax(&self) -> InFile<&SyntaxNode> { - self.with_value(self.value.syntax()) - } -} - -fn ascend_range_up_macros( - db: &dyn db::ExpandDatabase, - range: InFile, -) -> (FileRange, SyntaxContextId) { - match range.file_id.repr() { - HirFileIdRepr::FileId(file_id) => { - (FileRange { file_id, range: range.value }, SyntaxContextId::ROOT) - } - HirFileIdRepr::MacroFile(m) => { - ExpansionInfo::new(db, m).map_token_range_up(db, range.value) - } - } } diff --git a/crates/hir-expand/src/lib.rs b/crates/hir-expand/src/lib.rs index f328431372ec6..1d5b7dfa59433 100644 --- a/crates/hir-expand/src/lib.rs +++ b/crates/hir-expand/src/lib.rs @@ -172,17 +172,18 @@ pub enum MacroCallKind { } pub trait HirFileIdExt { - /// For macro-expansion files, returns the file original source file the - /// expansion originated from. + /// Returns the original file of this macro call hierarchy. fn original_file(self, db: &dyn db::ExpandDatabase) -> FileId; + /// Returns the original file of this macro call hierarchy while going into the included file if + /// one of the calls comes from an `include!`. fn original_file_respecting_includes(self, db: &dyn db::ExpandDatabase) -> FileId; /// If this is a macro call, returns the syntax node of the call. fn call_node(self, db: &dyn db::ExpandDatabase) -> Option>; /// If this is a macro call, returns the syntax node of the very first macro call this file resides in.
- fn original_call_node(self, db: &dyn db::ExpandDatabase) -> Option<(FileId, SyntaxNode)>; + fn original_call_node(self, db: &dyn db::ExpandDatabase) -> Option>; /// Return expansion information if it is a macro-expansion file fn expansion_info(self, db: &dyn db::ExpandDatabase) -> Option; @@ -246,11 +247,13 @@ impl HirFileIdExt for HirFileId { Some(loc.to_node(db)) } - fn original_call_node(self, db: &dyn db::ExpandDatabase) -> Option<(FileId, SyntaxNode)> { + fn original_call_node(self, db: &dyn db::ExpandDatabase) -> Option> { let mut call = db.lookup_intern_macro_call(self.macro_file()?.macro_call_id).to_node(db); loop { match call.file_id.repr() { - HirFileIdRepr::FileId(file_id) => break Some((file_id, call.value)), + HirFileIdRepr::FileId(file_id) => { + break Some(InRealFile { file_id, value: call.value }) + } HirFileIdRepr::MacroFile(MacroFileId { macro_call_id }) => { call = db.lookup_intern_macro_call(macro_call_id).to_node(db); } diff --git a/crates/ide-assists/src/handlers/remove_unused_imports.rs b/crates/ide-assists/src/handlers/remove_unused_imports.rs index 10076e60c3e8b..ee44064e7c5e7 100644 --- a/crates/ide-assists/src/handlers/remove_unused_imports.rs +++ b/crates/ide-assists/src/handlers/remove_unused_imports.rs @@ -1,6 +1,6 @@ use std::collections::{hash_map::Entry, HashMap}; -use hir::{HirFileIdExt, InFile, Module, ModuleSource}; +use hir::{HirFileIdExt, InFile, InRealFile, Module, ModuleSource}; use ide_db::{ base_db::FileRange, defs::Definition, @@ -167,7 +167,7 @@ fn used_once_in_scope(ctx: &AssistContext<'_>, def: Definition, scopes: &Vec Vec { let (file_id, range) = { let InFile { file_id, value } = module.definition_source(db); - if let Some((file_id, call_source)) = file_id.original_call_node(db) { + if let Some(InRealFile { file_id, value: call_source }) = file_id.original_call_node(db) { (file_id, Some(call_source.text_range())) } else { ( diff --git a/crates/ide-db/src/search.rs b/crates/ide-db/src/search.rs index 68f2ad494570b..2ce036c044ad5 100644 --- a/crates/ide-db/src/search.rs +++ b/crates/ide-db/src/search.rs @@ -8,8 +8,8 @@ use std::mem; use base_db::{salsa::Database, FileId, FileRange, SourceDatabase, SourceDatabaseExt}; use hir::{ - AsAssocItem, DefWithBody, HasAttrs, HasSource, HirFileIdExt, InFile, ModuleSource, Semantics, - Visibility, + AsAssocItem, DefWithBody, HasAttrs, HasSource, HirFileIdExt, InFile, InRealFile, ModuleSource, + Semantics, Visibility, }; use memchr::memmem::Finder; use nohash_hasher::IntMap; @@ -133,7 +133,8 @@ impl SearchScope { let (file_id, range) = { let InFile { file_id, value } = module.definition_source(db); - if let Some((file_id, call_source)) = file_id.original_call_node(db) { + if let Some(InRealFile { file_id, value: call_source }) = file_id.original_call_node(db) + { (file_id, Some(call_source.text_range())) } else { ( diff --git a/crates/ide/src/navigation_target.rs b/crates/ide/src/navigation_target.rs index 1e4d0e8cdc6e6..df0c4a6adee26 100644 --- a/crates/ide/src/navigation_target.rs +++ b/crates/ide/src/navigation_target.rs @@ -169,8 +169,13 @@ impl TryToNav for FileSymbol { fn try_to_nav(&self, db: &RootDatabase) -> Option { let full_range = self.loc.original_range(db); let focus_range = self.loc.original_name_range(db); - let focus_range = - if focus_range.file_id == full_range.file_id { Some(focus_range.range) } else { None }; + let focus_range = if focus_range.file_id == full_range.file_id + && full_range.range.contains_range(focus_range.range) + { + Some(focus_range.range) + } else { + None + }; 
Some(NavigationTarget { file_id: full_range.file_id, From 18f1a3c3c6ffc6179119e2503854807abbf9cd32 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Sun, 3 Dec 2023 18:50:29 +0100 Subject: [PATCH 27/80] Some final touches --- crates/hir-expand/src/lib.rs | 2 +- crates/hir-expand/src/quote.rs | 25 +++++++++++++--------- crates/hir-expand/src/span.rs | 3 +-- crates/mbe/src/benchmark.rs | 29 +++++++------------------- crates/mbe/src/expander/transcriber.rs | 17 +++++++++++---- crates/mbe/src/lib.rs | 2 +- crates/mbe/src/syntax_bridge.rs | 29 ++++++++++++++++---------- crates/mbe/src/token_map.rs | 24 ++++++++++++--------- crates/tt/src/lib.rs | 20 +++++++++++++----- crates/vfs/src/lib.rs | 2 +- 10 files changed, 86 insertions(+), 67 deletions(-) diff --git a/crates/hir-expand/src/lib.rs b/crates/hir-expand/src/lib.rs index 1d5b7dfa59433..bc7b5fb211d1b 100644 --- a/crates/hir-expand/src/lib.rs +++ b/crates/hir-expand/src/lib.rs @@ -663,7 +663,7 @@ impl ExpansionInfo { range: TextRange, ) -> Option<(FileRange, SyntaxContextId)> { debug_assert!(self.expanded.value.text_range().contains_range(range)); - let mut spans = self.exp_map.spans_for_node_range(range); + let mut spans = self.exp_map.spans_for_range(range); let SpanData { range, anchor, ctx } = spans.next()?; let mut start = range.start(); let mut end = range.end(); diff --git a/crates/hir-expand/src/quote.rs b/crates/hir-expand/src/quote.rs index 069bcc3bd8e2f..0950f5d287555 100644 --- a/crates/hir-expand/src/quote.rs +++ b/crates/hir-expand/src/quote.rs @@ -215,10 +215,18 @@ impl_to_to_tokentrees! { #[cfg(test)] mod tests { use crate::tt; - use ::tt::Span; + use base_db::{ + span::{SpanAnchor, SyntaxContextId, ROOT_ERASED_FILE_AST_ID}, + FileId, + }; use expect_test::expect; + use syntax::{TextRange, TextSize}; - const DUMMY: tt::SpanData = tt::SpanData::DUMMY; + const DUMMY: tt::SpanData = tt::SpanData { + range: TextRange::empty(TextSize::new(0)), + anchor: SpanAnchor { file_id: FileId::BOGUS, ast_id: ROOT_ERASED_FILE_AST_ID }, + ctx: SyntaxContextId::ROOT, + }; #[test] fn test_quote_delimiters() { @@ -242,10 +250,7 @@ mod tests { } fn mk_ident(name: &str) -> crate::tt::Ident { - crate::tt::Ident { - text: name.into(), - span: ::DUMMY, - } + crate::tt::Ident { text: name.into(), span: DUMMY } } #[test] @@ -256,8 +261,8 @@ mod tests { assert_eq!(quoted.to_string(), "hello"); let t = format!("{quoted:?}"); expect![[r#" - SUBTREE $$ SpanData { range: 0..0, anchor: SpanAnchor(FileId(0), 0), ctx: SyntaxContextId(0) } SpanData { range: 0..0, anchor: SpanAnchor(FileId(0), 0), ctx: SyntaxContextId(0) } - IDENT hello SpanData { range: 0..0, anchor: SpanAnchor(FileId(0), 0), ctx: SyntaxContextId(0) }"#]].assert_eq(&t); + SUBTREE $$ SpanData { range: 0..0, anchor: SpanAnchor(FileId(4294967295), 0), ctx: SyntaxContextId(0) } SpanData { range: 0..0, anchor: SpanAnchor(FileId(4294967295), 0), ctx: SyntaxContextId(0) } + IDENT hello SpanData { range: 0..0, anchor: SpanAnchor(FileId(4294967295), 0), ctx: SyntaxContextId(0) }"#]].assert_eq(&t); } #[test] @@ -290,8 +295,8 @@ mod tests { let list = crate::tt::Subtree { delimiter: crate::tt::Delimiter { kind: crate::tt::DelimiterKind::Brace, - open: ::DUMMY, - close: ::DUMMY, + open: DUMMY, + close: DUMMY, }, token_trees: fields.collect(), }; diff --git a/crates/hir-expand/src/span.rs b/crates/hir-expand/src/span.rs index 1703923d11a1c..0a6c22fe42dc4 100644 --- a/crates/hir-expand/src/span.rs +++ b/crates/hir-expand/src/span.rs @@ -4,13 +4,12 @@ use base_db::{ span::{ErasedFileAstId, SpanAnchor, SpanData, 
SyntaxContextId, ROOT_ERASED_FILE_AST_ID}, FileId, }; -use mbe::TokenMap; use syntax::{ast::HasModuleItem, AstNode, TextRange, TextSize}; use triomphe::Arc; use crate::db::ExpandDatabase; -pub type ExpansionSpanMap = TokenMap; +pub type ExpansionSpanMap = mbe::SpanMap; /// Spanmap for a macro file or a real file #[derive(Clone, Debug, PartialEq, Eq)] diff --git a/crates/mbe/src/benchmark.rs b/crates/mbe/src/benchmark.rs index 271efe1a92e9e..f503aecce2c2f 100644 --- a/crates/mbe/src/benchmark.rs +++ b/crates/mbe/src/benchmark.rs @@ -6,11 +6,10 @@ use syntax::{ AstNode, SmolStr, }; use test_utils::{bench, bench_fixture, skip_slow_tests}; -use tt::Span; use crate::{ parser::{MetaVarKind, Op, RepeatKind, Separator}, - syntax_node_to_token_tree, DeclarativeMacro, DummyTestSpanData, DummyTestSpanMap, + syntax_node_to_token_tree, DeclarativeMacro, DummyTestSpanData, DummyTestSpanMap, DUMMY, }; #[test] @@ -97,8 +96,8 @@ fn invocation_fixtures( loop { let mut subtree = tt::Subtree { delimiter: tt::Delimiter { - open: DummyTestSpanData::DUMMY, - close: DummyTestSpanData::DUMMY, + open: DUMMY, + close: DUMMY, kind: tt::DelimiterKind::Invisible, }, token_trees: vec![], @@ -211,34 +210,20 @@ fn invocation_fixtures( *seed } fn make_ident(ident: &str) -> tt::TokenTree { - tt::Leaf::Ident(tt::Ident { span: DummyTestSpanData::DUMMY, text: SmolStr::new(ident) }) - .into() + tt::Leaf::Ident(tt::Ident { span: DUMMY, text: SmolStr::new(ident) }).into() } fn make_punct(char: char) -> tt::TokenTree { - tt::Leaf::Punct(tt::Punct { - span: DummyTestSpanData::DUMMY, - char, - spacing: tt::Spacing::Alone, - }) - .into() + tt::Leaf::Punct(tt::Punct { span: DUMMY, char, spacing: tt::Spacing::Alone }).into() } fn make_literal(lit: &str) -> tt::TokenTree { - tt::Leaf::Literal(tt::Literal { - span: DummyTestSpanData::DUMMY, - text: SmolStr::new(lit), - }) - .into() + tt::Leaf::Literal(tt::Literal { span: DUMMY, text: SmolStr::new(lit) }).into() } fn make_subtree( kind: tt::DelimiterKind, token_trees: Option>>, ) -> tt::TokenTree { tt::Subtree { - delimiter: tt::Delimiter { - open: DummyTestSpanData::DUMMY, - close: DummyTestSpanData::DUMMY, - kind, - }, + delimiter: tt::Delimiter { open: DUMMY, close: DUMMY, kind }, token_trees: token_trees.unwrap_or_default(), } .into() diff --git a/crates/mbe/src/expander/transcriber.rs b/crates/mbe/src/expander/transcriber.rs index e4a46c16597fc..7a3e8653c28ff 100644 --- a/crates/mbe/src/expander/transcriber.rs +++ b/crates/mbe/src/expander/transcriber.rs @@ -79,8 +79,8 @@ impl Bindings { } MetaVarKind::Block => Fragment::Tokens(tt::TokenTree::Subtree(tt::Subtree { delimiter: tt::Delimiter { - open: S::DUMMY, - close: S::DUMMY, + open: span, + close: span, kind: tt::DelimiterKind::Brace, }, token_trees: vec![], @@ -225,6 +225,7 @@ fn expand_subtree( arena.push( tt::Leaf::Literal(tt::Literal { text: index.to_string().into(), + // FIXME span: S::DUMMY, }) .into(), @@ -282,8 +283,12 @@ fn expand_subtree( } }; arena.push( - tt::Leaf::Literal(tt::Literal { text: c.to_string().into(), span: S::DUMMY }) - .into(), + tt::Leaf::Literal(tt::Literal { + text: c.to_string().into(), + // FIXME + span: S::DUMMY, + }) + .into(), ); } } @@ -337,7 +342,9 @@ fn expand_var( } Err(e) => ExpandResult { value: Fragment::Tokens(tt::TokenTree::Subtree(tt::Subtree::empty(tt::DelimSpan { + // FIXME open: S::DUMMY, + // FIXME close: S::DUMMY, }))), err: Some(e), @@ -479,6 +486,7 @@ fn fix_up_and_push_path_tt(buf: &mut Vec>, subtree: tt tt::Leaf::Punct(tt::Punct { char: ':', spacing: tt::Spacing::Joint, + // 
FIXME span: S::DUMMY, }) .into(), @@ -487,6 +495,7 @@ fn fix_up_and_push_path_tt(buf: &mut Vec>, subtree: tt tt::Leaf::Punct(tt::Punct { char: ':', spacing: tt::Spacing::Alone, + // FIXME span: S::DUMMY, }) .into(), diff --git a/crates/mbe/src/lib.rs b/crates/mbe/src/lib.rs index 10eb59bb83524..2b52e04b251e2 100644 --- a/crates/mbe/src/lib.rs +++ b/crates/mbe/src/lib.rs @@ -38,7 +38,7 @@ pub use crate::{ syntax_node_to_token_tree, syntax_node_to_token_tree_modified, token_tree_to_syntax_node, SpanMapper, }, - token_map::TokenMap, + token_map::SpanMap, }; pub use crate::syntax_bridge::dummy_test_span_utils::*; diff --git a/crates/mbe/src/syntax_bridge.rs b/crates/mbe/src/syntax_bridge.rs index 5722a5bd8eb04..1c46471a38320 100644 --- a/crates/mbe/src/syntax_bridge.rs +++ b/crates/mbe/src/syntax_bridge.rs @@ -13,7 +13,7 @@ use tt::{ Span, SpanData, SyntaxContext, }; -use crate::{to_parser_input::to_parser_input, tt_iter::TtIter, TokenMap}; +use crate::{to_parser_input::to_parser_input, tt_iter::TtIter, SpanMap}; #[cfg(test)] mod tests; @@ -22,7 +22,7 @@ pub trait SpanMapper { fn span_for(&self, range: TextRange) -> S; } -impl SpanMapper for TokenMap { +impl SpanMapper for SpanMap { fn span_for(&self, range: TextRange) -> S { self.span_at(range.start()) } @@ -34,10 +34,12 @@ impl> SpanMapper for &SM { } } +/// Dummy things for testing where spans don't matter. pub(crate) mod dummy_test_span_utils { use super::*; pub type DummyTestSpanData = tt::SpanData; + pub const DUMMY: DummyTestSpanData = DummyTestSpanData::DUMMY; #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] pub struct DummyTestSpanAnchor; @@ -62,9 +64,8 @@ pub(crate) mod dummy_test_span_utils { } } -/// Convert the syntax node to a `TokenTree` (what macro -/// will consume). -/// FIXME: Flesh out the doc comment more thoroughly +/// Converts a syntax tree to a [`tt::Subtree`] using the provided span map to populate the +/// subtree's spans. pub fn syntax_node_to_token_tree( node: &SyntaxNode, map: SpanMap, @@ -79,6 +80,9 @@ where convert_tokens(&mut c) } +/// Converts a syntax tree to a [`tt::Subtree`] using the provided span map to populate the +/// subtree's spans. Additionally, the append and remove parameters allow extra tokens to be +/// injected into or hidden from the output. pub fn syntax_node_to_token_tree_modified( node: &SyntaxNode, map: SpanMap, @@ -107,10 +111,12 @@ where // * AssocItems(SmallVec<[ast::AssocItem; 1]>) // * ForeignItems(SmallVec<[ast::ForeignItem; 1]> +/// Converts a [`tt::Subtree`] back to a [`SyntaxNode`]. +/// The produced `SpanMap` contains a mapping from the syntax nodes' offsets to the subtree's spans. pub fn token_tree_to_syntax_node( tt: &tt::Subtree>, entry_point: parser::TopEntryPoint, -) -> (Parse, TokenMap>) +) -> (Parse, SpanMap>) where SpanData: Span, Anchor: Copy, @@ -142,7 +148,8 @@ where tree_sink.finish() } -/// Convert a string to a `TokenTree` +/// Convert a string to a `TokenTree`. The spans of the subtree will be anchored to the provided +/// anchor with the given context. pub fn parse_to_token_tree( anchor: Anchor, ctx: Ctx, @@ -161,7 +168,7 @@ where Some(convert_tokens(&mut conv)) } -/// Convert a string to a `TokenTree` +/// Convert a string to a `TokenTree`. The passed span will be used for all spans of the produced subtree.
pub fn parse_to_token_tree_static_span(span: S, text: &str) -> Option> where S: Span, @@ -798,7 +805,7 @@ where cursor: Cursor<'a, SpanData>, text_pos: TextSize, inner: SyntaxTreeBuilder, - token_map: TokenMap>, + token_map: SpanMap>, } impl<'a, Anchor, Ctx> TtTreeSink<'a, Anchor, Ctx> @@ -811,11 +818,11 @@ where cursor, text_pos: 0.into(), inner: SyntaxTreeBuilder::default(), - token_map: TokenMap::empty(), + token_map: SpanMap::empty(), } } - fn finish(mut self) -> (Parse, TokenMap>) { + fn finish(mut self) -> (Parse, SpanMap>) { self.token_map.finish(); (self.inner.finish(), self.token_map) } diff --git a/crates/mbe/src/token_map.rs b/crates/mbe/src/token_map.rs index 5871c0f1251cb..b51d7575a113f 100644 --- a/crates/mbe/src/token_map.rs +++ b/crates/mbe/src/token_map.rs @@ -8,30 +8,33 @@ use tt::Span; /// Maps absolute text ranges for the corresponding file to the relevant span data. #[derive(Debug, PartialEq, Eq, Clone, Hash)] -// FIXME: Rename to SpanMap -pub struct TokenMap { - // FIXME: This needs to be sorted by (FileId, AstId) - // Then we can do a binary search on the file id, - // then a bin search on the ast id? +pub struct SpanMap { spans: Vec<(TextSize, S)>, } -impl TokenMap { +impl SpanMap { + /// Creates a new empty [`SpanMap`]. pub fn empty() -> Self { Self { spans: Vec::new() } } + /// Finalizes the [`SpanMap`], shrinking its backing storage and validating that the offsets are + /// in order. pub fn finish(&mut self) { assert!(self.spans.iter().tuple_windows().all(|(a, b)| a.0 < b.0)); self.spans.shrink_to_fit(); } + /// Pushes a new span onto the [`SpanMap`]. pub fn push(&mut self, offset: TextSize, span: S) { + debug_assert!(self.spans.last().map_or(true, |&(last_offset, _)| last_offset < offset)); self.spans.push((offset, span)); } + /// Returns all [`TextRange`]s that correspond to the given span. + /// + /// Note this does a linear search through the entire backing vector. pub fn ranges_with_span(&self, span: S) -> impl Iterator + '_ { - // FIXME: linear search self.spans.iter().enumerate().filter_map(move |(idx, &(end, s))| { if s != span { return None; @@ -41,14 +44,15 @@ impl TokenMap { }) } - // FIXME: We need APIs for fetching the span of a token as well as for a whole node. The node - // one *is* fallible though. + /// Returns the span at the given position. pub fn span_at(&self, offset: TextSize) -> S { let entry = self.spans.partition_point(|&(it, _)| it <= offset); self.spans[entry].1 } - pub fn spans_for_node_range(&self, range: TextRange) -> impl Iterator + '_ { + /// Returns the spans associated with the given range. + /// In other words, this will return all spans that correspond to all offsets within the given range. + pub fn spans_for_range(&self, range: TextRange) -> impl Iterator + '_ { let (start, end) = (range.start(), range.end()); let start_entry = self.spans.partition_point(|&(it, _)| it <= start); let end_entry = self.spans[start_entry..].partition_point(|&(it, _)| it <= end); // FIXME: this might be wrong? 
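Before moving on to the tt crate changes, a self-contained sketch of the lookup strategy the SpanMap doc comments above describe may help. Offsets and spans are plain u32 stand-ins here rather than the real TextSize and span types, so this illustrates the technique only, not the actual rust-analyzer code.

// Sketch of the sorted-offset span map: entries are (end_offset, span)
// pairs in strictly increasing offset order, so `span_at` is a binary
// search via partition_point, mirroring the real implementation's use of
// `partition_point(|&(it, _)| it <= offset)`.

#[derive(Debug, Default)]
struct SpanMapSketch {
    spans: Vec<(u32, u32)>, // (end offset of a token, its span)
}

impl SpanMapSketch {
    // Offsets must be pushed in increasing order for partition_point to work.
    fn push(&mut self, end_offset: u32, span: u32) {
        debug_assert!(self.spans.last().map_or(true, |&(last, _)| last < end_offset));
        self.spans.push((end_offset, span));
    }

    // The first entry whose end offset lies strictly beyond `offset` owns it.
    // Like the real code, this assumes `offset` is covered by some entry.
    fn span_at(&self, offset: u32) -> u32 {
        let entry = self.spans.partition_point(|&(end, _)| end <= offset);
        self.spans[entry].1
    }
}

fn main() {
    let mut map = SpanMapSketch::default();
    map.push(10, 100); // offsets 0..10 map to span 100
    map.push(25, 200); // offsets 10..25 map to span 200
    assert_eq!(map.span_at(3), 100);
    assert_eq!(map.span_at(10), 200);
    assert_eq!(map.span_at(24), 200);
}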
diff --git a/crates/tt/src/lib.rs b/crates/tt/src/lib.rs index b1f2185162106..10e05862189f6 100644 --- a/crates/tt/src/lib.rs +++ b/crates/tt/src/lib.rs @@ -23,6 +23,7 @@ pub struct SpanData { } impl Span for SpanData { + #[allow(deprecated)] const DUMMY: Self = SpanData { range: TextRange::empty(TextSize::new(0)), anchor: Anchor::DUMMY, @@ -30,18 +31,24 @@ impl Span for SpanData { }; } -pub trait SpanAnchor: - std::fmt::Debug + Copy + Sized + Eq + Copy + fmt::Debug + std::hash::Hash -{ +pub trait Span: std::fmt::Debug + Copy + Sized + Eq { + // FIXME: Should not exist. Dummy spans will always be wrong if they leak somewhere. Instead, + // the call site or def site spans should be used in relevant places, it's just that we don't + // expose those everywhere yet. const DUMMY: Self; } -// FIXME: Get rid of this trait? -pub trait Span: std::fmt::Debug + Copy + Sized + Eq { +// FIXME: Should not exist +pub trait SpanAnchor: + std::fmt::Debug + Copy + Sized + Eq + Copy + fmt::Debug + std::hash::Hash +{ + #[deprecated(note = "this should not exist")] const DUMMY: Self; } +// FIXME: Should not exist pub trait SyntaxContext: std::fmt::Debug + Copy + Sized + Eq { + #[deprecated(note = "this should not exist")] const DUMMY: Self; } @@ -128,6 +135,7 @@ pub struct DelimSpan { } impl DelimSpan { + // FIXME should not exist pub const DUMMY: Self = Self { open: S::DUMMY, close: S::DUMMY }; } @@ -139,9 +147,11 @@ pub struct Delimiter { } impl Delimiter { + // FIXME should not exist pub const DUMMY_INVISIBLE: Self = Self { open: S::DUMMY, close: S::DUMMY, kind: DelimiterKind::Invisible }; + // FIXME should not exist pub const fn dummy_invisible() -> Self { Self::DUMMY_INVISIBLE } diff --git a/crates/vfs/src/lib.rs b/crates/vfs/src/lib.rs index c6866bbfb9b4b..128559727bdcd 100644 --- a/crates/vfs/src/lib.rs +++ b/crates/vfs/src/lib.rs @@ -63,7 +63,7 @@ pub use paths::{AbsPath, AbsPathBuf}; pub struct FileId(pub u32); impl FileId { - /// Think twice about using this. If this ends up in a wrong place it will cause panics! + /// Think twice about using this outside of tests. If this ends up in the wrong place it will cause panics!
pub const BOGUS: FileId = FileId(u32::MAX); } From 30fc9329d1c1afb023bf80bea08a243e3b6ef1b2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lauren=C8=9Biu=20Nicola?= Date: Mon, 4 Dec 2023 09:19:15 +0200 Subject: [PATCH 28/80] Merge commit 'e402c494b7c7d94a37c6d789a216187aaf9ccd3e' into sync-from-ra --- Cargo.lock | 4 +- Cargo.toml | 2 +- crates/hir-def/src/item_tree.rs | 54 ++--- crates/hir-def/src/item_tree/lower.rs | 2 +- crates/hir-ty/src/infer.rs | 21 +- crates/hir-ty/src/lower.rs | 2 +- crates/hir-ty/src/mir.rs | 14 ++ crates/hir-ty/src/mir/lower.rs | 54 +++-- crates/hir-ty/src/mir/pretty.rs | 4 +- crates/hir-ty/src/tests/patterns.rs | 24 +++ crates/hir/src/lib.rs | 30 +-- .../src/handlers/remove_parentheses.rs | 33 ++- crates/ide-completion/src/completions/dot.rs | 92 ++++++-- crates/ide-completion/src/render.rs | 116 +++++++++- crates/ide-completion/src/tests/pattern.rs | 29 +++ .../src/handlers/missing_match_arms.rs | 19 +- crates/ide/src/inlay_hints.rs | 9 +- crates/ide/src/inlay_hints/implicit_drop.rs | 204 ++++++++++++++++++ crates/ide/src/static_index.rs | 1 + crates/parser/src/grammar.rs | 10 + crates/parser/src/grammar/expressions.rs | 95 ++++---- crates/parser/src/grammar/items.rs | 1 + ..._comma_after_functional_update_syntax.rast | 66 ++++++ ...24_comma_after_functional_update_syntax.rs | 4 + .../parser/inline/err/0024_top_level_let.rast | 30 +++ .../parser/inline/err/0024_top_level_let.rs | 1 + .../rust-analyzer/src/cli/analysis_stats.rs | 1 + crates/rust-analyzer/src/config.rs | 3 + crates/rust-analyzer/src/reload.rs | 11 +- crates/test-utils/src/minicore.rs | 4 + docs/user/generated_config.adoc | 5 + editors/code/package.json | 5 + editors/code/src/debug.ts | 27 ++- editors/code/src/toolchain.ts | 14 +- 34 files changed, 821 insertions(+), 170 deletions(-) create mode 100644 crates/ide/src/inlay_hints/implicit_drop.rs create mode 100644 crates/parser/test_data/parser/inline/err/0024_comma_after_functional_update_syntax.rast create mode 100644 crates/parser/test_data/parser/inline/err/0024_comma_after_functional_update_syntax.rs create mode 100644 crates/parser/test_data/parser/inline/err/0024_top_level_let.rast create mode 100644 crates/parser/test_data/parser/inline/err/0024_top_level_let.rs diff --git a/Cargo.lock b/Cargo.lock index 5a8d971c3d4f8..876fd93aab713 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2000,9 +2000,9 @@ dependencies = [ [[package]] name = "triomphe" -version = "0.1.8" +version = "0.1.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f1ee9bd9239c339d714d657fac840c6d2a4f9c45f4f9ec7b0975113458be78db" +checksum = "d0c5a71827ac326072b6405552093e2ad2accd25a32fd78d4edc82d98c7f2409" [[package]] name = "tt" diff --git a/Cargo.toml b/Cargo.toml index 73bb9c84d2c95..272f456bf9f86 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -116,7 +116,7 @@ text-size = "1.1.1" rayon = "1.8.0" serde = { version = "1.0.192", features = ["derive"] } serde_json = "1.0.108" -triomphe = { version = "0.1.8", default-features = false, features = ["std"] } +triomphe = { version = "0.1.10", default-features = false, features = ["std"] } # can't upgrade due to dashmap depending on 0.12.3 currently hashbrown = { version = "0.12.3", features = [ "inline-more", diff --git a/crates/hir-def/src/item_tree.rs b/crates/hir-def/src/item_tree.rs index 70b96b25739cb..473ae298c7744 100644 --- a/crates/hir-def/src/item_tree.rs +++ b/crates/hir-def/src/item_tree.rs @@ -38,7 +38,6 @@ mod tests; use std::{ fmt::{self, Debug}, hash::{Hash, Hasher}, - marker::PhantomData, 
ops::Index, }; @@ -340,34 +339,37 @@ pub trait ItemTreeNode: Clone { fn id_to_mod_item(id: FileItemTreeId) -> ModItem; } -pub struct FileItemTreeId { - index: Idx, - _p: PhantomData, +pub struct FileItemTreeId(Idx); + +impl FileItemTreeId { + pub fn index(&self) -> Idx { + self.0 + } } impl Clone for FileItemTreeId { fn clone(&self) -> Self { - Self { index: self.index, _p: PhantomData } + Self(self.0) } } impl Copy for FileItemTreeId {} impl PartialEq for FileItemTreeId { fn eq(&self, other: &FileItemTreeId) -> bool { - self.index == other.index + self.0 == other.0 } } impl Eq for FileItemTreeId {} impl Hash for FileItemTreeId { fn hash(&self, state: &mut H) { - self.index.hash(state) + self.0.hash(state) } } impl fmt::Debug for FileItemTreeId { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - self.index.fmt(f) + self.0.fmt(f) } } @@ -548,7 +550,7 @@ impl Index for ItemTree { impl Index> for ItemTree { type Output = N; fn index(&self, id: FileItemTreeId) -> &N { - N::lookup(self, id.index) + N::lookup(self, id.index()) } } @@ -925,23 +927,23 @@ impl ModItem { pub fn ast_id(&self, tree: &ItemTree) -> FileAstId { match self { - ModItem::Use(it) => tree[it.index].ast_id().upcast(), - ModItem::ExternCrate(it) => tree[it.index].ast_id().upcast(), - ModItem::ExternBlock(it) => tree[it.index].ast_id().upcast(), - ModItem::Function(it) => tree[it.index].ast_id().upcast(), - ModItem::Struct(it) => tree[it.index].ast_id().upcast(), - ModItem::Union(it) => tree[it.index].ast_id().upcast(), - ModItem::Enum(it) => tree[it.index].ast_id().upcast(), - ModItem::Const(it) => tree[it.index].ast_id().upcast(), - ModItem::Static(it) => tree[it.index].ast_id().upcast(), - ModItem::Trait(it) => tree[it.index].ast_id().upcast(), - ModItem::TraitAlias(it) => tree[it.index].ast_id().upcast(), - ModItem::Impl(it) => tree[it.index].ast_id().upcast(), - ModItem::TypeAlias(it) => tree[it.index].ast_id().upcast(), - ModItem::Mod(it) => tree[it.index].ast_id().upcast(), - ModItem::MacroCall(it) => tree[it.index].ast_id().upcast(), - ModItem::MacroRules(it) => tree[it.index].ast_id().upcast(), - ModItem::MacroDef(it) => tree[it.index].ast_id().upcast(), + ModItem::Use(it) => tree[it.index()].ast_id().upcast(), + ModItem::ExternCrate(it) => tree[it.index()].ast_id().upcast(), + ModItem::ExternBlock(it) => tree[it.index()].ast_id().upcast(), + ModItem::Function(it) => tree[it.index()].ast_id().upcast(), + ModItem::Struct(it) => tree[it.index()].ast_id().upcast(), + ModItem::Union(it) => tree[it.index()].ast_id().upcast(), + ModItem::Enum(it) => tree[it.index()].ast_id().upcast(), + ModItem::Const(it) => tree[it.index()].ast_id().upcast(), + ModItem::Static(it) => tree[it.index()].ast_id().upcast(), + ModItem::Trait(it) => tree[it.index()].ast_id().upcast(), + ModItem::TraitAlias(it) => tree[it.index()].ast_id().upcast(), + ModItem::Impl(it) => tree[it.index()].ast_id().upcast(), + ModItem::TypeAlias(it) => tree[it.index()].ast_id().upcast(), + ModItem::Mod(it) => tree[it.index()].ast_id().upcast(), + ModItem::MacroCall(it) => tree[it.index()].ast_id().upcast(), + ModItem::MacroRules(it) => tree[it.index()].ast_id().upcast(), + ModItem::MacroDef(it) => tree[it.index()].ast_id().upcast(), } } } diff --git a/crates/hir-def/src/item_tree/lower.rs b/crates/hir-def/src/item_tree/lower.rs index c898eb5f92112..6807326be5aef 100644 --- a/crates/hir-def/src/item_tree/lower.rs +++ b/crates/hir-def/src/item_tree/lower.rs @@ -13,7 +13,7 @@ use crate::{ use super::*; fn id(index: Idx) -> FileItemTreeId { - FileItemTreeId { 
index, _p: PhantomData } + FileItemTreeId(index) } pub(super) struct Ctx<'a> { diff --git a/crates/hir-ty/src/infer.rs b/crates/hir-ty/src/infer.rs index 3d5ed1f93c0fa..8262edec22c1e 100644 --- a/crates/hir-ty/src/infer.rs +++ b/crates/hir-ty/src/infer.rs @@ -1152,20 +1152,15 @@ impl<'a> InferenceContext<'a> { (ty, variant) } TypeNs::TypeAliasId(it) => { - let container = it.lookup(self.db.upcast()).container; - let parent_subst = match container { - ItemContainerId::TraitId(id) => { - let subst = TyBuilder::subst_for_def(self.db, id, None) - .fill_with_inference_vars(&mut self.table) - .build(); - Some(subst) - } - // Type aliases do not exist in impls. - _ => None, + let resolved_seg = match unresolved { + None => path.segments().last().unwrap(), + Some(n) => path.segments().get(path.segments().len() - n - 1).unwrap(), }; - let ty = TyBuilder::def_ty(self.db, it.into(), parent_subst) - .fill_with_inference_vars(&mut self.table) - .build(); + let substs = + ctx.substs_from_path_segment(resolved_seg, Some(it.into()), true, None); + let ty = self.db.ty(it.into()); + let ty = self.insert_type_vars(ty.substitute(Interner, &substs)); + self.resolve_variant_on_alias(ty, unresolved, mod_path) } TypeNs::AdtSelfType(_) => { diff --git a/crates/hir-ty/src/lower.rs b/crates/hir-ty/src/lower.rs index 04005311b673b..9f5b59b239a4b 100644 --- a/crates/hir-ty/src/lower.rs +++ b/crates/hir-ty/src/lower.rs @@ -768,7 +768,7 @@ impl<'a> TyLoweringContext<'a> { } } - fn substs_from_path_segment( + pub(super) fn substs_from_path_segment( &self, segment: PathSegment<'_>, def: Option, diff --git a/crates/hir-ty/src/mir.rs b/crates/hir-ty/src/mir.rs index 747ca54858c80..2e6fe59d3bd80 100644 --- a/crates/hir-ty/src/mir.rs +++ b/crates/hir-ty/src/mir.rs @@ -269,6 +269,10 @@ impl ProjectionStore { impl ProjectionId { pub const EMPTY: ProjectionId = ProjectionId(0); + pub fn is_empty(self) -> bool { + self == ProjectionId::EMPTY + } + pub fn lookup(self, store: &ProjectionStore) -> &[PlaceElem] { store.id_to_proj.get(&self).unwrap() } @@ -1069,6 +1073,10 @@ pub struct MirBody { } impl MirBody { + pub fn local_to_binding_map(&self) -> ArenaMap { + self.binding_locals.iter().map(|(it, y)| (*y, it)).collect() + } + fn walk_places(&mut self, mut f: impl FnMut(&mut Place, &mut ProjectionStore)) { fn for_operand( op: &mut Operand, @@ -1188,3 +1196,9 @@ pub enum MirSpan { } impl_from!(ExprId, PatId for MirSpan); + +impl From<&ExprId> for MirSpan { + fn from(value: &ExprId) -> Self { + (*value).into() + } +} diff --git a/crates/hir-ty/src/mir/lower.rs b/crates/hir-ty/src/mir/lower.rs index 9905d522146cf..922aee011cf33 100644 --- a/crates/hir-ty/src/mir/lower.rs +++ b/crates/hir-ty/src/mir/lower.rs @@ -105,9 +105,14 @@ pub enum MirLowerError { /// A token to ensuring that each drop scope is popped at most once, thanks to the compiler that checks moves. 
struct DropScopeToken; impl DropScopeToken { - fn pop_and_drop(self, ctx: &mut MirLowerCtx<'_>, current: BasicBlockId) -> BasicBlockId { + fn pop_and_drop( + self, + ctx: &mut MirLowerCtx<'_>, + current: BasicBlockId, + span: MirSpan, + ) -> BasicBlockId { std::mem::forget(self); - ctx.pop_drop_scope_internal(current) + ctx.pop_drop_scope_internal(current, span) } /// It is useful when we want a drop scope is syntaxically closed, but we don't want to execute any drop @@ -582,7 +587,7 @@ impl<'ctx> MirLowerCtx<'ctx> { self.lower_loop(current, place, *label, expr_id.into(), |this, begin| { let scope = this.push_drop_scope(); if let Some((_, mut current)) = this.lower_expr_as_place(begin, *body, true)? { - current = scope.pop_and_drop(this, current); + current = scope.pop_and_drop(this, current, body.into()); this.set_goto(current, begin, expr_id.into()); } else { scope.pop_assume_dropped(this); @@ -720,7 +725,8 @@ impl<'ctx> MirLowerCtx<'ctx> { .ok_or(MirLowerError::ContinueWithoutLoop)?, }; let begin = loop_data.begin; - current = self.drop_until_scope(loop_data.drop_scope_index, current); + current = + self.drop_until_scope(loop_data.drop_scope_index, current, expr_id.into()); self.set_goto(current, begin, expr_id.into()); Ok(None) } @@ -759,7 +765,7 @@ impl<'ctx> MirLowerCtx<'ctx> { self.current_loop_blocks.as_ref().unwrap().drop_scope_index, ), }; - current = self.drop_until_scope(drop_scope, current); + current = self.drop_until_scope(drop_scope, current, expr_id.into()); self.set_goto(current, end, expr_id.into()); Ok(None) } @@ -773,7 +779,7 @@ impl<'ctx> MirLowerCtx<'ctx> { return Ok(None); } } - current = self.drop_until_scope(0, current); + current = self.drop_until_scope(0, current, expr_id.into()); self.set_terminator(current, TerminatorKind::Return, expr_id.into()); Ok(None) } @@ -1782,7 +1788,7 @@ impl<'ctx> MirLowerCtx<'ctx> { return Ok(None); }; self.push_fake_read(c, p, expr.into()); - current = scope2.pop_and_drop(self, c); + current = scope2.pop_and_drop(self, c, expr.into()); } } } @@ -1793,7 +1799,7 @@ impl<'ctx> MirLowerCtx<'ctx> { }; current = c; } - current = scope.pop_and_drop(self, current); + current = scope.pop_and_drop(self, current, span); Ok(Some(current)) } @@ -1873,9 +1879,14 @@ impl<'ctx> MirLowerCtx<'ctx> { } } - fn drop_until_scope(&mut self, scope_index: usize, mut current: BasicBlockId) -> BasicBlockId { + fn drop_until_scope( + &mut self, + scope_index: usize, + mut current: BasicBlockId, + span: MirSpan, + ) -> BasicBlockId { for scope in self.drop_scopes[scope_index..].to_vec().iter().rev() { - self.emit_drop_and_storage_dead_for_scope(scope, &mut current); + self.emit_drop_and_storage_dead_for_scope(scope, &mut current, span); } current } @@ -1891,17 +1902,22 @@ impl<'ctx> MirLowerCtx<'ctx> { } /// Don't call directly - fn pop_drop_scope_internal(&mut self, mut current: BasicBlockId) -> BasicBlockId { + fn pop_drop_scope_internal( + &mut self, + mut current: BasicBlockId, + span: MirSpan, + ) -> BasicBlockId { let scope = self.drop_scopes.pop().unwrap(); - self.emit_drop_and_storage_dead_for_scope(&scope, &mut current); + self.emit_drop_and_storage_dead_for_scope(&scope, &mut current, span); current } fn pop_drop_scope_assert_finished( &mut self, mut current: BasicBlockId, + span: MirSpan, ) -> Result { - current = self.pop_drop_scope_internal(current); + current = self.pop_drop_scope_internal(current, span); if !self.drop_scopes.is_empty() { implementation_error!("Mismatched count between drop scope push and pops"); } @@ -1912,6 +1928,7 @@ 
impl<'ctx> MirLowerCtx<'ctx> { &mut self, scope: &DropScope, current: &mut Idx, + span: MirSpan, ) { for &l in scope.locals.iter().rev() { if !self.result.locals[l].ty.clone().is_copy(self.db, self.owner) { @@ -1919,13 +1936,10 @@ impl<'ctx> MirLowerCtx<'ctx> { self.set_terminator( prev, TerminatorKind::Drop { place: l.into(), target: *current, unwind: None }, - MirSpan::Unknown, + span, ); } - self.push_statement( - *current, - StatementKind::StorageDead(l).with_span(MirSpan::Unknown), - ); + self.push_statement(*current, StatementKind::StorageDead(l).with_span(span)); } } } @@ -2002,7 +2016,7 @@ pub fn mir_body_for_closure_query( |_| true, )?; if let Some(current) = ctx.lower_expr_to_place(*root, return_slot().into(), current)? { - let current = ctx.pop_drop_scope_assert_finished(current)?; + let current = ctx.pop_drop_scope_assert_finished(current, root.into())?; ctx.set_terminator(current, TerminatorKind::Return, (*root).into()); } let mut upvar_map: FxHashMap> = FxHashMap::default(); @@ -2146,7 +2160,7 @@ pub fn lower_to_mir( ctx.lower_params_and_bindings([].into_iter(), binding_picker)? }; if let Some(current) = ctx.lower_expr_to_place(root_expr, return_slot().into(), current)? { - let current = ctx.pop_drop_scope_assert_finished(current)?; + let current = ctx.pop_drop_scope_assert_finished(current, root_expr.into())?; ctx.set_terminator(current, TerminatorKind::Return, root_expr.into()); } Ok(ctx.result) diff --git a/crates/hir-ty/src/mir/pretty.rs b/crates/hir-ty/src/mir/pretty.rs index 6e42bee97f796..a91f90bc249e5 100644 --- a/crates/hir-ty/src/mir/pretty.rs +++ b/crates/hir-ty/src/mir/pretty.rs @@ -145,7 +145,7 @@ impl<'a> MirPrettyCtx<'a> { let indent = mem::take(&mut self.indent); let mut ctx = MirPrettyCtx { body: &body, - local_to_binding: body.binding_locals.iter().map(|(it, y)| (*y, it)).collect(), + local_to_binding: body.local_to_binding_map(), result, indent, ..*self @@ -167,7 +167,7 @@ impl<'a> MirPrettyCtx<'a> { } fn new(body: &'a MirBody, hir_body: &'a Body, db: &'a dyn HirDatabase) -> Self { - let local_to_binding = body.binding_locals.iter().map(|(it, y)| (*y, it)).collect(); + let local_to_binding = body.local_to_binding_map(); MirPrettyCtx { body, db, diff --git a/crates/hir-ty/src/tests/patterns.rs b/crates/hir-ty/src/tests/patterns.rs index 0f5a3e1752cf5..5d7bab09c26e6 100644 --- a/crates/hir-ty/src/tests/patterns.rs +++ b/crates/hir-ty/src/tests/patterns.rs @@ -1129,3 +1129,27 @@ fn foo() { "#, ); } + +#[test] +fn generic_alias() { + check_types( + r#" +type Wrap = T; + +enum X { + A { cool: u32, stuff: u32 }, + B, +} + +fn main() { + let wrapped = Wrap::::A { + cool: 100, + stuff: 100, + }; + + if let Wrap::::A { cool, ..} = &wrapped {} + //^^^^ &u32 +} +"#, + ); +} diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs index 1bfbf7212bf0a..908027a2026d1 100644 --- a/crates/hir/src/lib.rs +++ b/crates/hir/src/lib.rs @@ -67,7 +67,7 @@ use hir_ty::{ known_const_to_ast, layout::{Layout as TyLayout, RustcEnumVariantIdx, RustcFieldIdx, TagEncoding}, method_resolution::{self, TyFingerprint}, - mir::{self, interpret_mir}, + mir::interpret_mir, primitive::UintTy, traits::FnTrait, AliasTy, CallableDefId, CallableSig, Canonical, CanonicalVarKinds, Cast, ClosureId, GenericArg, @@ -129,9 +129,10 @@ pub use { hir_ty::{ display::{ClosureStyle, HirDisplay, HirDisplayError, HirWrite}, layout::LayoutError, - mir::MirEvalError, PointerCast, Safety, }, + // FIXME: Properly encapsulate mir + hir_ty::{mir, Interner as ChalkTyInterner}, }; // These are negative re-exports: pub 
using these names is forbidden, they @@ -1914,17 +1915,20 @@ impl DefWithBody { if let ast::Expr::MatchExpr(match_expr) = &source_ptr.value.to_node(&root) { - if let Some(scrut_expr) = match_expr.expr() { - acc.push( - MissingMatchArms { - scrutinee_expr: InFile::new( - source_ptr.file_id, - AstPtr::new(&scrut_expr), - ), - uncovered_patterns, - } - .into(), - ); + match match_expr.expr() { + Some(scrut_expr) if match_expr.match_arm_list().is_some() => { + acc.push( + MissingMatchArms { + scrutinee_expr: InFile::new( + source_ptr.file_id, + AstPtr::new(&scrut_expr), + ), + uncovered_patterns, + } + .into(), + ); + } + _ => {} } } } diff --git a/crates/ide-assists/src/handlers/remove_parentheses.rs b/crates/ide-assists/src/handlers/remove_parentheses.rs index ffc32f8049968..0281b29cd4279 100644 --- a/crates/ide-assists/src/handlers/remove_parentheses.rs +++ b/crates/ide-assists/src/handlers/remove_parentheses.rs @@ -1,4 +1,4 @@ -use syntax::{ast, AstNode}; +use syntax::{ast, AstNode, SyntaxKind, T}; use crate::{AssistContext, AssistId, AssistKind, Assists}; @@ -39,7 +39,19 @@ pub(crate) fn remove_parentheses(acc: &mut Assists, ctx: &AssistContext<'_>) -> AssistId("remove_parentheses", AssistKind::Refactor), "Remove redundant parentheses", target, - |builder| builder.replace_ast(parens.into(), expr), + |builder| { + let prev_token = parens.syntax().first_token().and_then(|it| it.prev_token()); + let need_to_add_ws = match prev_token { + Some(it) => { + let tokens = vec![T![&], T![!], T!['('], T!['['], T!['{']]; + it.kind() != SyntaxKind::WHITESPACE && !tokens.contains(&it.kind()) + } + None => false, + }; + let expr = if need_to_add_ws { format!(" {}", expr) } else { expr.to_string() }; + + builder.replace(parens.syntax().text_range(), expr) + }, ) } @@ -49,6 +61,15 @@ mod tests { use super::*; + #[test] + fn remove_parens_space() { + check_assist( + remove_parentheses, + r#"fn f() { match$0(true) {} }"#, + r#"fn f() { match true {} }"#, + ); + } + #[test] fn remove_parens_simple() { check_assist(remove_parentheses, r#"fn f() { $0(2) + 2; }"#, r#"fn f() { 2 + 2; }"#); @@ -94,8 +115,8 @@ mod tests { check_assist(remove_parentheses, r#"fn f() { f(($02 + 2)); }"#, r#"fn f() { f(2 + 2); }"#); check_assist( remove_parentheses, - r#"fn f() { (1<2)&&$0(3>4); }"#, - r#"fn f() { (1<2)&&3>4; }"#, + r#"fn f() { (1<2) &&$0(3>4); }"#, + r#"fn f() { (1<2) && 3>4; }"#, ); } @@ -164,8 +185,8 @@ mod tests { fn remove_parens_weird_places() { check_assist( remove_parentheses, - r#"fn f() { match () { _=>$0(()) } }"#, - r#"fn f() { match () { _=>() } }"#, + r#"fn f() { match () { _ =>$0(()) } }"#, + r#"fn f() { match () { _ => () } }"#, ); check_assist( diff --git a/crates/ide-completion/src/completions/dot.rs b/crates/ide-completion/src/completions/dot.rs index 5bcc867fe1810..57e06461099e5 100644 --- a/crates/ide-completion/src/completions/dot.rs +++ b/crates/ide-completion/src/completions/dot.rs @@ -26,17 +26,17 @@ pub(crate) fn complete_dot( item.add_to(acc, ctx.db); } - if let DotAccessKind::Method { .. } = dot_access.kind { - cov_mark::hit!(test_no_struct_field_completion_for_method_call); - } else { - complete_fields( - acc, - ctx, - receiver_ty, - |acc, field, ty| acc.add_field(ctx, dot_access, None, field, &ty), - |acc, field, ty| acc.add_tuple_field(ctx, None, field, &ty), - ); - } + let is_field_access = matches!(dot_access.kind, DotAccessKind::Field { .. 
}); + + complete_fields( + acc, + ctx, + receiver_ty, + |acc, field, ty| acc.add_field(ctx, dot_access, None, field, &ty), + |acc, field, ty| acc.add_tuple_field(ctx, None, field, &ty), + is_field_access, + ); + complete_methods(ctx, receiver_ty, |func| acc.add_method(ctx, dot_access, func, None, None)); } @@ -82,6 +82,7 @@ pub(crate) fn complete_undotted_self( ) }, |acc, field, ty| acc.add_tuple_field(ctx, Some(hir::known::SELF_PARAM), field, &ty), + true, ); complete_methods(ctx, &ty, |func| { acc.add_method( @@ -104,18 +105,23 @@ fn complete_fields( receiver: &hir::Type, mut named_field: impl FnMut(&mut Completions, hir::Field, hir::Type), mut tuple_index: impl FnMut(&mut Completions, usize, hir::Type), + is_field_access: bool, ) { let mut seen_names = FxHashSet::default(); for receiver in receiver.autoderef(ctx.db) { for (field, ty) in receiver.fields(ctx.db) { - if seen_names.insert(field.name(ctx.db)) { + if seen_names.insert(field.name(ctx.db)) + && (is_field_access || ty.is_fn() || ty.is_closure()) + { named_field(acc, field, ty); } } for (i, ty) in receiver.tuple_fields(ctx.db).into_iter().enumerate() { // Tuples are always the last type in a deref chain, so just check if the name is // already seen without inserting into the hashset. - if !seen_names.contains(&hir::Name::new_tuple_field(i)) { + if !seen_names.contains(&hir::Name::new_tuple_field(i)) + && (is_field_access || ty.is_fn() || ty.is_closure()) + { // Tuple fields are always public (tuple struct fields are handled above). tuple_index(acc, i, ty); } @@ -250,7 +256,6 @@ impl A { #[test] fn test_no_struct_field_completion_for_method_call() { - cov_mark::check!(test_no_struct_field_completion_for_method_call); check( r#" struct A { the_field: u32 } @@ -1172,4 +1177,63 @@ impl> Foo { "#]], ); } + + #[test] + fn test_struct_function_field_completion() { + check( + r#" +struct S { va_field: u32, fn_field: fn() } +fn foo() { S { va_field: 0, fn_field: || {} }.fi$0() } +"#, + expect![[r#" + fd fn_field fn() + "#]], + ); + + check_edit( + "fn_field", + r#" +struct S { va_field: u32, fn_field: fn() } +fn foo() { S { va_field: 0, fn_field: || {} }.fi$0() } +"#, + r#" +struct S { va_field: u32, fn_field: fn() } +fn foo() { (S { va_field: 0, fn_field: || {} }.fn_field)() } +"#, + ); + } + + #[test] + fn test_tuple_function_field_completion() { + check( + r#" +struct B(u32, fn()) +fn foo() { + let b = B(0, || {}); + b.$0() +} +"#, + expect![[r#" + fd 1 fn() + "#]], + ); + + check_edit( + "1", + r#" +struct B(u32, fn()) +fn foo() { + let b = B(0, || {}); + b.$0() +} +"#, + r#" +struct B(u32, fn()) +fn foo() { + let b = B(0, || {}); + (b.1)() +} +"#, + ) + } } diff --git a/crates/ide-completion/src/render.rs b/crates/ide-completion/src/render.rs index 00a9081985b83..048730c078d7b 100644 --- a/crates/ide-completion/src/render.rs +++ b/crates/ide-completion/src/render.rs @@ -18,9 +18,10 @@ use ide_db::{ RootDatabase, SnippetCap, SymbolKind, }; use syntax::{AstNode, SmolStr, SyntaxKind, TextRange}; +use text_edit::TextEdit; use crate::{ - context::{DotAccess, PathCompletionCtx, PathKind, PatternContext}, + context::{DotAccess, DotAccessKind, PathCompletionCtx, PathKind, PatternContext}, item::{Builder, CompletionRelevanceTypeMatch}, render::{ function::render_fn, @@ -147,7 +148,42 @@ pub(crate) fn render_field( .set_documentation(field.docs(db)) .set_deprecated(is_deprecated) .lookup_by(name); - item.insert_text(field_with_receiver(db, receiver.as_ref(), &escaped_name)); + + let is_field_access = matches!(dot_access.kind, 
DotAccessKind::Field { .. }); + if !is_field_access || ty.is_fn() || ty.is_closure() { + let mut builder = TextEdit::builder(); + // Using TextEdit, insert '(' before the struct name and ')' before the + // dot access, then comes the field name and optionally insert function + // call parens. + + builder.replace( + ctx.source_range(), + field_with_receiver(db, receiver.as_ref(), &escaped_name).into(), + ); + + let expected_fn_type = + ctx.completion.expected_type.as_ref().is_some_and(|ty| ty.is_fn() || ty.is_closure()); + + if !expected_fn_type { + if let Some(receiver) = &dot_access.receiver { + if let Some(receiver) = ctx.completion.sema.original_ast_node(receiver.clone()) { + builder.insert(receiver.syntax().text_range().start(), "(".to_string()); + builder.insert(ctx.source_range().end(), ")".to_string()); + } + } + + let is_parens_needed = + !matches!(dot_access.kind, DotAccessKind::Method { has_parens: true }); + + if is_parens_needed { + builder.insert(ctx.source_range().end(), "()".to_string()); + } + } + + item.text_edit(builder.finish()); + } else { + item.insert_text(field_with_receiver(db, receiver.as_ref(), &escaped_name)); + } if let Some(receiver) = &dot_access.receiver { if let Some(original) = ctx.completion.sema.original_ast_node(receiver.clone()) { if let Some(ref_match) = compute_ref_match(ctx.completion, ty) { @@ -1600,7 +1636,7 @@ fn main() { fn struct_field_method_ref() { check_kinds( r#" -struct Foo { bar: u32 } +struct Foo { bar: u32, qux: fn() } impl Foo { fn baz(&self) -> u32 { 0 } } fn foo(f: Foo) { let _: &u32 = f.b$0 } @@ -1610,30 +1646,92 @@ fn foo(f: Foo) { let _: &u32 = f.b$0 } [ CompletionItem { label: "baz()", - source_range: 98..99, - delete: 98..99, + source_range: 109..110, + delete: 109..110, insert: "baz()$0", kind: Method, lookup: "baz", detail: "fn(&self) -> u32", - ref_match: "&@96", + ref_match: "&@107", }, CompletionItem { label: "bar", - source_range: 98..99, - delete: 98..99, + source_range: 109..110, + delete: 109..110, insert: "bar", kind: SymbolKind( Field, ), detail: "u32", - ref_match: "&@96", + ref_match: "&@107", + }, + CompletionItem { + label: "qux", + source_range: 109..110, + text_edit: TextEdit { + indels: [ + Indel { + insert: "(", + delete: 107..107, + }, + Indel { + insert: "qux)()", + delete: 109..110, + }, + ], + }, + kind: SymbolKind( + Field, + ), + detail: "fn()", }, ] "#]], ); } + #[test] + fn expected_fn_type_ref() { + check_kinds( + r#" +struct S { field: fn() } + +fn foo() { + let foo: fn() = S { fields: || {}}.fi$0; +} +"#, + &[CompletionItemKind::SymbolKind(SymbolKind::Field)], + expect![[r#" + [ + CompletionItem { + label: "field", + source_range: 76..78, + delete: 76..78, + insert: "field", + kind: SymbolKind( + Field, + ), + detail: "fn()", + relevance: CompletionRelevance { + exact_name_match: false, + type_match: Some( + Exact, + ), + is_local: false, + is_item_from_trait: false, + is_name_already_imported: false, + requires_import: false, + is_op_method: false, + is_private_editable: false, + postfix_match: None, + is_definite: false, + }, + }, + ] + "#]], + ) + } + #[test] fn qualified_path_ref() { check_kinds( diff --git a/crates/ide-completion/src/tests/pattern.rs b/crates/ide-completion/src/tests/pattern.rs index 8af6cce98f635..b2e8274a84d7d 100644 --- a/crates/ide-completion/src/tests/pattern.rs +++ b/crates/ide-completion/src/tests/pattern.rs @@ -354,6 +354,35 @@ fn outer(Foo { bar$0 }: Foo) {} ) } +#[test] +fn completes_in_record_field_pat_with_generic_type_alias() { + check_empty( + r#" +type Wrap = T; + 
+enum X { + A { cool: u32, stuff: u32 }, + B, +} + +fn main() { + let wrapped = Wrap::::A { + cool: 100, + stuff: 100, + }; + + if let Wrap::::A { $0 } = &wrapped {}; +} +"#, + expect![[r#" + fd cool u32 + fd stuff u32 + kw mut + kw ref + "#]], + ) +} + #[test] fn completes_in_fn_param() { check_empty( diff --git a/crates/ide-diagnostics/src/handlers/missing_match_arms.rs b/crates/ide-diagnostics/src/handlers/missing_match_arms.rs index 84267d3d9069f..ef6a273ed8e65 100644 --- a/crates/ide-diagnostics/src/handlers/missing_match_arms.rs +++ b/crates/ide-diagnostics/src/handlers/missing_match_arms.rs @@ -17,7 +17,10 @@ pub(crate) fn missing_match_arms( #[cfg(test)] mod tests { - use crate::tests::check_diagnostics; + use crate::{ + tests::{check_diagnostics, check_diagnostics_with_config}, + DiagnosticsConfig, + }; #[track_caller] fn check_diagnostics_no_bails(ra_fixture: &str) { @@ -25,6 +28,20 @@ mod tests { crate::tests::check_diagnostics(ra_fixture) } + #[test] + fn empty_body() { + let mut config = DiagnosticsConfig::test_sample(); + config.disabled.insert("syntax-error".to_string()); + check_diagnostics_with_config( + config, + r#" +fn main() { + match 0; +} +"#, + ); + } + #[test] fn empty_tuple() { check_diagnostics_no_bails( diff --git a/crates/ide/src/inlay_hints.rs b/crates/ide/src/inlay_hints.rs index 24f44ca06ffb3..7ea9d4f1038b4 100644 --- a/crates/ide/src/inlay_hints.rs +++ b/crates/ide/src/inlay_hints.rs @@ -31,6 +31,7 @@ mod discriminant; mod fn_lifetime_fn; mod implicit_static; mod param_name; +mod implicit_drop; #[derive(Clone, Debug, PartialEq, Eq)] pub struct InlayHintsConfig { @@ -45,6 +46,7 @@ pub struct InlayHintsConfig { pub closure_return_type_hints: ClosureReturnTypeHints, pub closure_capture_hints: bool, pub binding_mode_hints: bool, + pub implicit_drop_hints: bool, pub lifetime_elision_hints: LifetimeElisionHints, pub param_names_for_lifetime_elision_hints: bool, pub hide_named_constructor_hints: bool, @@ -124,6 +126,7 @@ pub enum InlayKind { Lifetime, Parameter, Type, + Drop, } #[derive(Debug)] @@ -503,7 +506,10 @@ fn hints( ast::Item(it) => match it { // FIXME: record impl lifetimes so they aren't being reused in assoc item lifetime inlay hints ast::Item::Impl(_) => None, - ast::Item::Fn(it) => fn_lifetime_fn::hints(hints, config, it), + ast::Item::Fn(it) => { + implicit_drop::hints(hints, sema, config, &it); + fn_lifetime_fn::hints(hints, config, it) + }, // static type elisions ast::Item::Static(it) => implicit_static::hints(hints, config, Either::Left(it)), ast::Item::Const(it) => implicit_static::hints(hints, config, Either::Right(it)), @@ -591,6 +597,7 @@ mod tests { max_length: None, closing_brace_hints_min_lines: None, fields_to_resolve: InlayFieldsToResolve::empty(), + implicit_drop_hints: false, }; pub(super) const TEST_CONFIG: InlayHintsConfig = InlayHintsConfig { type_hints: true, diff --git a/crates/ide/src/inlay_hints/implicit_drop.rs b/crates/ide/src/inlay_hints/implicit_drop.rs new file mode 100644 index 0000000000000..60f1f3496f6cd --- /dev/null +++ b/crates/ide/src/inlay_hints/implicit_drop.rs @@ -0,0 +1,204 @@ +//! Implementation of "implicit drop" inlay hints: +//! ```no_run +//! fn main() { +//! let x = vec![2]; +//! if some_condition() { +//! /* drop(x) */return; +//! } +//! } +//! 
``` +use hir::{ + db::{DefDatabase as _, HirDatabase as _}, + mir::{MirSpan, TerminatorKind}, + ChalkTyInterner, DefWithBody, Semantics, +}; +use ide_db::{base_db::FileRange, RootDatabase}; + +use syntax::{ + ast::{self, AstNode}, + match_ast, +}; + +use crate::{InlayHint, InlayHintLabel, InlayHintPosition, InlayHintsConfig, InlayKind}; + +pub(super) fn hints( + acc: &mut Vec, + sema: &Semantics<'_, RootDatabase>, + config: &InlayHintsConfig, + def: &ast::Fn, +) -> Option<()> { + if !config.implicit_drop_hints { + return None; + } + + let def = sema.to_def(def)?; + let def: DefWithBody = def.into(); + + let source_map = sema.db.body_with_source_map(def.into()).1; + + let hir = sema.db.body(def.into()); + let mir = sema.db.mir_body(def.into()).ok()?; + + let local_to_binding = mir.local_to_binding_map(); + + for (_, bb) in mir.basic_blocks.iter() { + let terminator = bb.terminator.as_ref()?; + if let TerminatorKind::Drop { place, .. } = terminator.kind { + if !place.projection.is_empty() { + continue; // Ignore complex cases for now + } + if mir.locals[place.local].ty.adt_id(ChalkTyInterner).is_none() { + continue; // Arguably only ADTs have significant drop impls + } + let Some(binding) = local_to_binding.get(place.local) else { + continue; // Ignore temporary values + }; + let range = match terminator.span { + MirSpan::ExprId(e) => match source_map.expr_syntax(e) { + Ok(s) => { + let root = &s.file_syntax(sema.db); + let expr = s.value.to_node(root); + let expr = expr.syntax(); + match_ast! { + match expr { + ast::BlockExpr(x) => x.stmt_list().and_then(|x| x.r_curly_token()).map(|x| x.text_range()).unwrap_or_else(|| expr.text_range()), + _ => expr.text_range(), + } + } + } + Err(_) => continue, + }, + MirSpan::PatId(p) => match source_map.pat_syntax(p) { + Ok(s) => s.value.text_range(), + Err(_) => continue, + }, + MirSpan::Unknown => continue, + }; + let binding = &hir.bindings[*binding]; + let binding_source = binding + .definitions + .first() + .and_then(|d| source_map.pat_syntax(*d).ok()) + .and_then(|d| { + Some(FileRange { file_id: d.file_id.file_id()?, range: d.value.text_range() }) + }); + let name = binding.name.to_smol_str(); + if name.starts_with(" Option<()> { + let x = X; + let t_opt = Some(2); + let t = t_opt?; + //^^^^^^ drop(x) + Some(()) + } + //^ drop(x) +"#, + ); + } + + #[test] + fn if_let() { + check_with_config( + ONLY_DROP_CONFIG, + r#" + struct X; + fn f() { + let x = X; + if let X = x { + let y = X; + } + //^ drop(y) + } + //^ drop(x) +"#, + ); + } +} diff --git a/crates/ide/src/static_index.rs b/crates/ide/src/static_index.rs index aabd26da289ca..b54874d59f8bc 100644 --- a/crates/ide/src/static_index.rs +++ b/crates/ide/src/static_index.rs @@ -118,6 +118,7 @@ impl StaticIndex<'_> { adjustment_hints: crate::AdjustmentHints::Never, adjustment_hints_mode: AdjustmentHintsMode::Prefix, adjustment_hints_hide_outside_unsafe: false, + implicit_drop_hints: false, hide_named_constructor_hints: false, hide_closure_initialization_hints: false, closure_style: hir::ClosureStyle::ImplFn, diff --git a/crates/parser/src/grammar.rs b/crates/parser/src/grammar.rs index 6a2a9adce15cb..19da297b58ccc 100644 --- a/crates/parser/src/grammar.rs +++ b/crates/parser/src/grammar.rs @@ -376,6 +376,16 @@ fn error_block(p: &mut Parser<'_>, message: &str) { m.complete(p, ERROR); } +// test_err top_level_let +// let ref foo: fn() = 1 + 3; +fn error_let_stmt(p: &mut Parser<'_>, message: &str) { + assert!(p.at(T![let])); + let m = p.start(); + p.error(message); + expressions::let_stmt(p, 
expressions::Semicolon::Optional); + m.complete(p, ERROR); +} + /// The `parser` passed this is required to at least consume one token if it returns `true`. /// If the `parser` returns false, parsing will stop. fn delimited( diff --git a/crates/parser/src/grammar/expressions.rs b/crates/parser/src/grammar/expressions.rs index 1cbd1663230bd..e346ece2f94b6 100644 --- a/crates/parser/src/grammar/expressions.rs +++ b/crates/parser/src/grammar/expressions.rs @@ -59,7 +59,8 @@ pub(super) fn stmt(p: &mut Parser<'_>, semicolon: Semicolon) { attributes::outer_attrs(p); if p.at(T![let]) { - let_stmt(p, m, semicolon); + let_stmt(p, semicolon); + m.complete(p, LET_STMT); return; } @@ -109,54 +110,53 @@ pub(super) fn stmt(p: &mut Parser<'_>, semicolon: Semicolon) { m.complete(p, EXPR_STMT); } } +} - // test let_stmt - // fn f() { let x: i32 = 92; } - fn let_stmt(p: &mut Parser<'_>, m: Marker, with_semi: Semicolon) { - p.bump(T![let]); - patterns::pattern(p); - if p.at(T![:]) { - // test let_stmt_ascription - // fn f() { let x: i32; } - types::ascription(p); - } +// test let_stmt +// fn f() { let x: i32 = 92; } +pub(super) fn let_stmt(p: &mut Parser<'_>, with_semi: Semicolon) { + p.bump(T![let]); + patterns::pattern(p); + if p.at(T![:]) { + // test let_stmt_ascription + // fn f() { let x: i32; } + types::ascription(p); + } - let mut expr_after_eq: Option = None; - if p.eat(T![=]) { - // test let_stmt_init - // fn f() { let x = 92; } - expr_after_eq = expressions::expr(p); - } + let mut expr_after_eq: Option = None; + if p.eat(T![=]) { + // test let_stmt_init + // fn f() { let x = 92; } + expr_after_eq = expressions::expr(p); + } - if p.at(T![else]) { - // test_err let_else_right_curly_brace - // fn func() { let Some(_) = {Some(1)} else { panic!("h") };} - if let Some(expr) = expr_after_eq { - if BlockLike::is_blocklike(expr.kind()) { - p.error( - "right curly brace `}` before `else` in a `let...else` statement not allowed", - ) - } + if p.at(T![else]) { + // test_err let_else_right_curly_brace + // fn func() { let Some(_) = {Some(1)} else { panic!("h") };} + if let Some(expr) = expr_after_eq { + if BlockLike::is_blocklike(expr.kind()) { + p.error( + "right curly brace `}` before `else` in a `let...else` statement not allowed", + ) } - - // test let_else - // fn f() { let Some(x) = opt else { return }; } - let m = p.start(); - p.bump(T![else]); - block_expr(p); - m.complete(p, LET_ELSE); } - match with_semi { - Semicolon::Forbidden => (), - Semicolon::Optional => { - p.eat(T![;]); - } - Semicolon::Required => { - p.expect(T![;]); - } + // test let_else + // fn f() { let Some(x) = opt else { return }; } + let m = p.start(); + p.bump(T![else]); + block_expr(p); + m.complete(p, LET_ELSE); + } + + match with_semi { + Semicolon::Forbidden => (), + Semicolon::Optional => { + p.eat(T![;]); + } + Semicolon::Required => { + p.expect(T![;]); } - m.complete(p, LET_STMT); } } @@ -693,6 +693,17 @@ pub(crate) fn record_expr_field_list(p: &mut Parser<'_>) { // We permit `.. }` on the left-hand side of a destructuring assignment. if !p.at(T!['}']) { expr(p); + + if p.at(T![,]) { + // test_err comma_after_functional_update_syntax + // fn foo() { + // S { ..x, }; + // S { ..x, a: 0 } + // } + + // Do not bump, so we can support additional fields after this comma. 
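+                // Recovery sketch: the comma is left in place and parsing continues,
+                // so `a: 0` in `S { ..x, a: 0 }` is still parsed as a record field
+                // (see the `comma_after_functional_update_syntax` test data below).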
+ p.error("cannot use a comma after the base struct"); + } } } T!['{'] => { diff --git a/crates/parser/src/grammar/items.rs b/crates/parser/src/grammar/items.rs index 4e850b1f74df8..34fd3420f1c99 100644 --- a/crates/parser/src/grammar/items.rs +++ b/crates/parser/src/grammar/items.rs @@ -79,6 +79,7 @@ pub(super) fn item_or_macro(p: &mut Parser<'_>, stop_on_r_curly: bool) { e.complete(p, ERROR); } EOF | T!['}'] => p.error("expected an item"), + T![let] => error_let_stmt(p, "expected an item"), _ => p.err_and_bump("expected an item"), } } diff --git a/crates/parser/test_data/parser/inline/err/0024_comma_after_functional_update_syntax.rast b/crates/parser/test_data/parser/inline/err/0024_comma_after_functional_update_syntax.rast new file mode 100644 index 0000000000000..0e2fe5988d6aa --- /dev/null +++ b/crates/parser/test_data/parser/inline/err/0024_comma_after_functional_update_syntax.rast @@ -0,0 +1,66 @@ +SOURCE_FILE + FN + FN_KW "fn" + WHITESPACE " " + NAME + IDENT "foo" + PARAM_LIST + L_PAREN "(" + R_PAREN ")" + WHITESPACE " " + BLOCK_EXPR + STMT_LIST + L_CURLY "{" + WHITESPACE "\n " + EXPR_STMT + RECORD_EXPR + PATH + PATH_SEGMENT + NAME_REF + IDENT "S" + WHITESPACE " " + RECORD_EXPR_FIELD_LIST + L_CURLY "{" + WHITESPACE " " + DOT2 ".." + PATH_EXPR + PATH + PATH_SEGMENT + NAME_REF + IDENT "x" + COMMA "," + WHITESPACE " " + R_CURLY "}" + SEMICOLON ";" + WHITESPACE "\n " + RECORD_EXPR + PATH + PATH_SEGMENT + NAME_REF + IDENT "S" + WHITESPACE " " + RECORD_EXPR_FIELD_LIST + L_CURLY "{" + WHITESPACE " " + DOT2 ".." + PATH_EXPR + PATH + PATH_SEGMENT + NAME_REF + IDENT "x" + COMMA "," + WHITESPACE " " + RECORD_EXPR_FIELD + NAME_REF + IDENT "a" + COLON ":" + WHITESPACE " " + LITERAL + INT_NUMBER "0" + WHITESPACE " " + R_CURLY "}" + WHITESPACE "\n" + R_CURLY "}" + WHITESPACE "\n" +error 22: cannot use a comma after the base struct +error 38: cannot use a comma after the base struct diff --git a/crates/parser/test_data/parser/inline/err/0024_comma_after_functional_update_syntax.rs b/crates/parser/test_data/parser/inline/err/0024_comma_after_functional_update_syntax.rs new file mode 100644 index 0000000000000..14cf96719b464 --- /dev/null +++ b/crates/parser/test_data/parser/inline/err/0024_comma_after_functional_update_syntax.rs @@ -0,0 +1,4 @@ +fn foo() { + S { ..x, }; + S { ..x, a: 0 } +} diff --git a/crates/parser/test_data/parser/inline/err/0024_top_level_let.rast b/crates/parser/test_data/parser/inline/err/0024_top_level_let.rast new file mode 100644 index 0000000000000..5ddef5f3f03f6 --- /dev/null +++ b/crates/parser/test_data/parser/inline/err/0024_top_level_let.rast @@ -0,0 +1,30 @@ +SOURCE_FILE + ERROR + LET_KW "let" + WHITESPACE " " + IDENT_PAT + REF_KW "ref" + WHITESPACE " " + NAME + IDENT "foo" + COLON ":" + WHITESPACE " " + FN_PTR_TYPE + FN_KW "fn" + PARAM_LIST + L_PAREN "(" + R_PAREN ")" + WHITESPACE " " + EQ "=" + WHITESPACE " " + BIN_EXPR + LITERAL + INT_NUMBER "1" + WHITESPACE " " + PLUS "+" + WHITESPACE " " + LITERAL + INT_NUMBER "3" + SEMICOLON ";" + WHITESPACE "\n" +error 0: expected an item diff --git a/crates/parser/test_data/parser/inline/err/0024_top_level_let.rs b/crates/parser/test_data/parser/inline/err/0024_top_level_let.rs new file mode 100644 index 0000000000000..3d3e7dd56c71c --- /dev/null +++ b/crates/parser/test_data/parser/inline/err/0024_top_level_let.rs @@ -0,0 +1 @@ +let ref foo: fn() = 1 + 3; diff --git a/crates/rust-analyzer/src/cli/analysis_stats.rs b/crates/rust-analyzer/src/cli/analysis_stats.rs index 0f6539f224d79..b4debba38c974 100644 --- 
a/crates/rust-analyzer/src/cli/analysis_stats.rs +++ b/crates/rust-analyzer/src/cli/analysis_stats.rs @@ -783,6 +783,7 @@ impl flags::AnalysisStats { closure_return_type_hints: ide::ClosureReturnTypeHints::Always, closure_capture_hints: true, binding_mode_hints: true, + implicit_drop_hints: true, lifetime_elision_hints: ide::LifetimeElisionHints::Always, param_names_for_lifetime_elision_hints: true, hide_named_constructor_hints: false, diff --git a/crates/rust-analyzer/src/config.rs b/crates/rust-analyzer/src/config.rs index f28f6ffb8748a..90d1d6b055594 100644 --- a/crates/rust-analyzer/src/config.rs +++ b/crates/rust-analyzer/src/config.rs @@ -381,6 +381,8 @@ config_data! { inlayHints_expressionAdjustmentHints_hideOutsideUnsafe: bool = "false", /// Whether to show inlay hints as postfix ops (`.*` instead of `*`, etc). inlayHints_expressionAdjustmentHints_mode: AdjustmentHintsModeDef = "\"prefix\"", + /// Whether to show implicit drop hints. + inlayHints_implicitDrops_enable: bool = "false", /// Whether to show inlay type hints for elided lifetimes in function signatures. inlayHints_lifetimeElisionHints_enable: LifetimeElisionDef = "\"never\"", /// Whether to prefer using parameter names as the name for elided lifetime hints if possible. @@ -1391,6 +1393,7 @@ impl Config { type_hints: self.data.inlayHints_typeHints_enable, parameter_hints: self.data.inlayHints_parameterHints_enable, chaining_hints: self.data.inlayHints_chainingHints_enable, + implicit_drop_hints: self.data.inlayHints_implicitDrops_enable, discriminant_hints: match self.data.inlayHints_discriminantHints_enable { DiscriminantHintsDef::Always => ide::DiscriminantHints::Always, DiscriminantHintsDef::Never => ide::DiscriminantHints::Never, diff --git a/crates/rust-analyzer/src/reload.rs b/crates/rust-analyzer/src/reload.rs index 8dba83ed5edd2..abe2191f40024 100644 --- a/crates/rust-analyzer/src/reload.rs +++ b/crates/rust-analyzer/src/reload.rs @@ -22,6 +22,7 @@ use ide_db::{ base_db::{salsa::Durability, CrateGraph, ProcMacroPaths, ProcMacros}, FxHashMap, }; +use itertools::Itertools; use load_cargo::{load_proc_macro, ProjectFolders}; use proc_macro_api::ProcMacroServer; use project_model::{ProjectWorkspace, WorkspaceBuildScripts}; @@ -227,16 +228,12 @@ impl GlobalState { let mut i = 0; while i < workspaces.len() { if let Ok(w) = &workspaces[i] { - let dupes: Vec<_> = workspaces + let dupes: Vec<_> = workspaces[i + 1..] .iter() - .enumerate() - .skip(i + 1) - .filter_map(|(i, it)| { - it.as_ref().ok().filter(|ws| ws.eq_ignore_build_data(w)).map(|_| i) - }) + .positions(|it| it.as_ref().is_ok_and(|ws| ws.eq_ignore_build_data(w))) .collect(); dupes.into_iter().rev().for_each(|d| { - _ = workspaces.remove(d); + _ = workspaces.remove(d + i + 1); }); } i += 1; diff --git a/crates/test-utils/src/minicore.rs b/crates/test-utils/src/minicore.rs index f2ca9d82ed0dd..ba5c86db0e2ea 100644 --- a/crates/test-utils/src/minicore.rs +++ b/crates/test-utils/src/minicore.rs @@ -1054,6 +1054,10 @@ pub mod option { Some(T), } + // region:copy + impl Copy for Option {} + // endregion:copy + impl Option { pub const fn unwrap(self) -> T { match self { diff --git a/docs/user/generated_config.adoc b/docs/user/generated_config.adoc index 7091ea1ce9a7e..8a2d080844352 100644 --- a/docs/user/generated_config.adoc +++ b/docs/user/generated_config.adoc @@ -564,6 +564,11 @@ Whether to hide inlay hints for type adjustments outside of `unsafe` blocks. -- Whether to show inlay hints as postfix ops (`.*` instead of `*`, etc). 
--
+[[rust-analyzer.inlayHints.implicitDrops.enable]]rust-analyzer.inlayHints.implicitDrops.enable (default: `false`)::
++
+--
+Whether to show implicit drop hints.
+--
 [[rust-analyzer.inlayHints.lifetimeElisionHints.enable]]rust-analyzer.inlayHints.lifetimeElisionHints.enable (default: `"never"`)::
 +
 --
diff --git a/editors/code/package.json b/editors/code/package.json
index c43f2b964fde9..cfaf421327753 100644
--- a/editors/code/package.json
+++ b/editors/code/package.json
@@ -1264,6 +1264,11 @@
                         "Show prefix or postfix depending on which uses less parenthesis, preferring postfix."
                     ]
                 },
+                "rust-analyzer.inlayHints.implicitDrops.enable": {
+                    "markdownDescription": "Whether to show implicit drop hints.",
+                    "default": false,
+                    "type": "boolean"
+                },
                 "rust-analyzer.inlayHints.lifetimeElisionHints.enable": {
                     "markdownDescription": "Whether to show inlay type hints for elided lifetimes in function signatures.",
                     "default": "never",
diff --git a/editors/code/src/debug.ts b/editors/code/src/debug.ts
index e817d680eaeff..06034e1648077 100644
--- a/editors/code/src/debug.ts
+++ b/editors/code/src/debug.ts
@@ -3,7 +3,7 @@ import * as vscode from "vscode";
 import * as path from "path";
 
 import type * as ra from "./lsp_ext";
-import { Cargo, getRustcId, getSysroot } from "./toolchain";
+import { Cargo, type ExecutableInfo, getRustcId, getSysroot } from "./toolchain";
 import type { Ctx } from "./ctx";
 import { prepareEnv } from "./run";
 import { unwrapUndefinable } from "./undefinable";
@@ -12,6 +12,7 @@ const debugOutput = vscode.window.createOutputChannel("Debug");
 type DebugConfigProvider = (
     config: ra.Runnable,
     executable: string,
+    cargoWorkspace: string,
     env: Record<string, string>,
     sourceFileMap?: Record<string, string>,
 ) => vscode.DebugConfiguration;
@@ -130,7 +131,7 @@ async function getDebugConfiguration(
     }
 
     const env = prepareEnv(runnable, ctx.config.runnablesExtraEnv);
-    const executable = await getDebugExecutable(runnable, env);
+    const { executable, workspace: cargoWorkspace } = await getDebugExecutableInfo(runnable, env);
     let sourceFileMap = debugOptions.sourceFileMap;
     if (sourceFileMap === "auto") {
         // let's try to use the default toolchain
@@ -142,7 +143,13 @@ async function getDebugConfiguration(
     }
 
     const provider = unwrapUndefinable(knownEngines[debugEngine.id]);
-    const debugConfig = provider(runnable, simplifyPath(executable), env, sourceFileMap);
+    const debugConfig = provider(
+        runnable,
+        simplifyPath(executable),
+        cargoWorkspace,
+        env,
+        sourceFileMap,
+    );
     if (debugConfig.type in debugOptions.engineSettings) {
         const settingsMap = (debugOptions.engineSettings as any)[debugConfig.type];
         for (var key in settingsMap) {
@@ -164,20 +171,21 @@ async function getDebugConfiguration(
     return debugConfig;
 }
 
-async function getDebugExecutable(
+async function getDebugExecutableInfo(
     runnable: ra.Runnable,
     env: Record<string, string>,
-): Promise<string> {
+): Promise<ExecutableInfo> {
     const cargo = new Cargo(runnable.args.workspaceRoot || ".", debugOutput, env);
-    const executable = await cargo.executableFromArgs(runnable.args.cargoArgs);
+    const executableInfo = await cargo.executableInfoFromArgs(runnable.args.cargoArgs);
 
     // if we are here, there were no compilation errors.
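+    // Note: the returned ExecutableInfo also carries the artifact's workspace
+    // directory, which the debug configurations below use as the launch cwd.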
-    return executable;
+    return executableInfo;
 }
 
 function getLldbDebugConfig(
     runnable: ra.Runnable,
     executable: string,
+    cargoWorkspace: string,
     env: Record<string, string>,
     sourceFileMap?: Record<string, string>,
 ): vscode.DebugConfiguration {
@@ -187,7 +195,7 @@
         name: runnable.label,
         program: executable,
         args: runnable.args.executableArgs,
-        cwd: runnable.args.workspaceRoot,
+        cwd: cargoWorkspace || runnable.args.workspaceRoot,
         sourceMap: sourceFileMap,
         sourceLanguages: ["rust"],
         env,
@@ -197,6 +205,7 @@
 function getCppvsDebugConfig(
     runnable: ra.Runnable,
     executable: string,
+    cargoWorkspace: string,
     env: Record<string, string>,
     sourceFileMap?: Record<string, string>,
 ): vscode.DebugConfiguration {
@@ -206,7 +215,7 @@
         name: runnable.label,
         program: executable,
         args: runnable.args.executableArgs,
-        cwd: runnable.args.workspaceRoot,
+        cwd: cargoWorkspace || runnable.args.workspaceRoot,
         sourceFileMap,
         env,
     };
diff --git a/editors/code/src/toolchain.ts b/editors/code/src/toolchain.ts
index 58e5fc747a190..1037e513aa106 100644
--- a/editors/code/src/toolchain.ts
+++ b/editors/code/src/toolchain.ts
@@ -9,11 +9,17 @@ import { unwrapUndefinable } from "./undefinable";
 
 interface CompilationArtifact {
     fileName: string;
+    workspace: string;
     name: string;
     kind: string;
     isTest: boolean;
 }
 
+export interface ExecutableInfo {
+    executable: string;
+    workspace: string;
+}
+
 export interface ArtifactSpec {
     cargoArgs: string[];
     filter?: (artifacts: CompilationArtifact[]) => CompilationArtifact[];
 }
@@ -68,6 +74,7 @@ export class Cargo {
                         artifacts.push({
                             fileName: message.executable,
                             name: message.target.name,
+                            workspace: message.manifest_path.replace(/\/Cargo\.toml$/, ""),
                             kind: message.target.kind[0],
                             isTest: message.profile.test,
                         });
@@ -86,7 +93,7 @@
 
         return spec.filter?.(artifacts) ?? 
artifacts; } - async executableFromArgs(args: readonly string[]): Promise { + async executableInfoFromArgs(args: readonly string[]): Promise { const artifacts = await this.getArtifacts(Cargo.artifactSpec(args)); if (artifacts.length === 0) { @@ -96,7 +103,10 @@ export class Cargo { } const artifact = unwrapUndefinable(artifacts[0]); - return artifact.fileName; + return { + executable: artifact.fileName, + workspace: artifact.workspace, + }; } private async runCargo( From 05e8b926e639a13451e89fae1d9883d1afbec517 Mon Sep 17 00:00:00 2001 From: werifu Date: Mon, 4 Dec 2023 16:23:18 +0800 Subject: [PATCH 29/80] fix: bug in extract_function: should not import ControlFlow in some cases --- .../src/handlers/extract_function.rs | 28 ++++++++++++++++++- 1 file changed, 27 insertions(+), 1 deletion(-) diff --git a/crates/ide-assists/src/handlers/extract_function.rs b/crates/ide-assists/src/handlers/extract_function.rs index 6b48d15881523..6def61a64555d 100644 --- a/crates/ide-assists/src/handlers/extract_function.rs +++ b/crates/ide-assists/src/handlers/extract_function.rs @@ -147,7 +147,12 @@ pub(crate) fn extract_function(acc: &mut Assists, ctx: &AssistContext<'_>) -> Op _ => format_function(ctx, module, &fun, old_indent, new_indent), }; - if fn_def.contains("ControlFlow") { + // There are external control flows + if fun + .control_flow + .kind + .is_some_and(|kind| matches!(kind, FlowKind::Break(_, _) | FlowKind::Continue(_))) + { let scope = match scope { ImportScope::File(it) => ImportScope::File(builder.make_mut(it)), ImportScope::Module(it) => ImportScope::Module(builder.make_mut(it)), @@ -4968,6 +4973,27 @@ pub fn testfn(arg: &mut Foo) { fn $0fun_name(arg: &mut Foo) { arg.field = 8; } +"#, + ); + } + #[test] + fn does_not_import_control_flow() { + check_assist( + extract_function, + r#" +//- minicore: try +fn func() { + $0let cf = "I'm ControlFlow";$0 +} +"#, + r#" +fn func() { + fun_name(); +} + +fn $0fun_name() { + let cf = "I'm ControlFlow"; +} "#, ); } From 20c6f270240bdb3b34d4f1399ccc29823343deed Mon Sep 17 00:00:00 2001 From: dfireBird Date: Mon, 4 Dec 2023 22:06:19 +0530 Subject: [PATCH 30/80] Insert fn call parens only if the parens inserted around field name --- .../ide-completion/src/completions/record.rs | 25 +++++++++++++++++++ crates/ide-completion/src/render.rs | 12 ++++----- 2 files changed, 31 insertions(+), 6 deletions(-) diff --git a/crates/ide-completion/src/completions/record.rs b/crates/ide-completion/src/completions/record.rs index 945c3945bfa39..46213deb0afef 100644 --- a/crates/ide-completion/src/completions/record.rs +++ b/crates/ide-completion/src/completions/record.rs @@ -427,6 +427,31 @@ fn foo() { ..Default::default() }; } +"#, + ); + } + + #[test] + fn callable_field_struct_init() { + check_edit( + "field", + r#" +struct S { + field: fn(), +} + +fn main() { + S {fi$0 +} +"#, + r#" +struct S { + field: fn(), +} + +fn main() { + S {field +} "#, ); } diff --git a/crates/ide-completion/src/render.rs b/crates/ide-completion/src/render.rs index 048730c078d7b..830d7cabab585 100644 --- a/crates/ide-completion/src/render.rs +++ b/crates/ide-completion/src/render.rs @@ -169,14 +169,14 @@ pub(crate) fn render_field( if let Some(receiver) = ctx.completion.sema.original_ast_node(receiver.clone()) { builder.insert(receiver.syntax().text_range().start(), "(".to_string()); builder.insert(ctx.source_range().end(), ")".to_string()); - } - } - let is_parens_needed = - !matches!(dot_access.kind, DotAccessKind::Method { has_parens: true }); + let is_parens_needed = + 
!matches!(dot_access.kind, DotAccessKind::Method { has_parens: true }); - if is_parens_needed { - builder.insert(ctx.source_range().end(), "()".to_string()); + if is_parens_needed { + builder.insert(ctx.source_range().end(), "()".to_string()); + } + } } } From 6d2543b62256676718f1936a182f92b10f2ca8ac Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lauren=C8=9Biu=20Nicola?= Date: Mon, 4 Dec 2023 21:41:19 +0200 Subject: [PATCH 31/80] Temporarily revert delay_bug to delayed_bug change --- crates/hir-ty/src/layout.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/hir-ty/src/layout.rs b/crates/hir-ty/src/layout.rs index 27c7949986878..b2591f016d429 100644 --- a/crates/hir-ty/src/layout.rs +++ b/crates/hir-ty/src/layout.rs @@ -110,7 +110,7 @@ struct LayoutCx<'a> { impl<'a> LayoutCalculator for LayoutCx<'a> { type TargetDataLayoutRef = &'a TargetDataLayout; - fn delayed_bug(&self, txt: String) { + fn delay_bug(&self, txt: String) { never!("{}", txt); } From 523ad0f634b15ef595ba846e837916829a43a0d7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lauren=C8=9Biu=20Nicola?= Date: Mon, 4 Dec 2023 22:15:44 +0200 Subject: [PATCH 32/80] Disable debuginfo again --- Cargo.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Cargo.toml b/Cargo.toml index 7347eb6c1bf86..272f456bf9f86 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -12,7 +12,7 @@ authors = ["rust-analyzer team"] [profile.dev] # Disabling debug info speeds up builds a bunch, # and we don't rely on it for debugging that much. -debug = 1 +debug = 0 [profile.dev.package] # These speed up local tests. From a7224c998d9e15fddaf80692729a38b1b0e15107 Mon Sep 17 00:00:00 2001 From: Igor Matuszewski Date: Tue, 5 Dec 2023 11:35:09 +0100 Subject: [PATCH 33/80] Don't explicitly warn against `semicolon_in_expressions_from_macros` This has been warn-by-default for two years now and has already been added to the future-incompat lints in 1.68. --- crates/base-db/src/lib.rs | 2 +- crates/cfg/src/lib.rs | 2 +- crates/flycheck/src/lib.rs | 2 +- crates/hir-def/src/lib.rs | 2 +- crates/hir-expand/src/lib.rs | 2 +- crates/hir-ty/src/lib.rs | 2 +- crates/hir/src/lib.rs | 2 +- crates/ide-assists/src/lib.rs | 2 +- crates/ide-completion/src/lib.rs | 2 +- crates/ide-db/src/lib.rs | 2 +- crates/ide-diagnostics/src/lib.rs | 2 +- crates/ide-ssr/src/lib.rs | 2 +- crates/ide/src/lib.rs | 2 +- crates/limit/src/lib.rs | 2 +- crates/mbe/src/lib.rs | 2 +- crates/parser/src/lib.rs | 2 +- crates/paths/src/lib.rs | 2 +- crates/proc-macro-api/src/lib.rs | 2 +- crates/proc-macro-srv/src/lib.rs | 2 +- crates/proc-macro-test/imp/src/lib.rs | 2 +- crates/proc-macro-test/src/lib.rs | 2 +- crates/profile/src/lib.rs | 2 +- crates/project-model/src/lib.rs | 2 +- crates/rust-analyzer/src/bin/main.rs | 2 +- crates/rust-analyzer/src/lib.rs | 2 +- crates/rust-analyzer/tests/slow-tests/main.rs | 2 +- crates/sourcegen/src/lib.rs | 2 +- crates/stdx/src/lib.rs | 2 +- crates/syntax/src/lib.rs | 2 +- crates/test-utils/src/lib.rs | 2 +- crates/text-edit/src/lib.rs | 2 +- crates/toolchain/src/lib.rs | 2 +- crates/tt/src/lib.rs | 2 +- crates/vfs-notify/src/lib.rs | 2 +- crates/vfs/src/lib.rs | 2 +- lib/la-arena/src/lib.rs | 2 +- lib/lsp-server/src/lib.rs | 2 +- xtask/src/main.rs | 2 +- 38 files changed, 38 insertions(+), 38 deletions(-) diff --git a/crates/base-db/src/lib.rs b/crates/base-db/src/lib.rs index cddd5bdf48eec..57e7934367bb2 100644 --- a/crates/base-db/src/lib.rs +++ b/crates/base-db/src/lib.rs @@ -1,6 +1,6 @@ //! base_db defines basic database traits. 
The concrete DB is defined by ide. -#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)] +#![warn(rust_2018_idioms, unused_lifetimes)] mod input; mod change; diff --git a/crates/cfg/src/lib.rs b/crates/cfg/src/lib.rs index 8bbe5e2a8c29b..6b178e7b04a76 100644 --- a/crates/cfg/src/lib.rs +++ b/crates/cfg/src/lib.rs @@ -1,6 +1,6 @@ //! cfg defines conditional compiling options, `cfg` attribute parser and evaluator -#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)] +#![warn(rust_2018_idioms, unused_lifetimes)] mod cfg_expr; mod dnf; diff --git a/crates/flycheck/src/lib.rs b/crates/flycheck/src/lib.rs index 0749d91eb32a1..68faca51e8263 100644 --- a/crates/flycheck/src/lib.rs +++ b/crates/flycheck/src/lib.rs @@ -2,7 +2,7 @@ //! another compatible command (f.x. clippy) in a background thread and provide //! LSP diagnostics based on the output of the command. -#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)] +#![warn(rust_2018_idioms, unused_lifetimes)] use std::{ ffi::OsString, diff --git a/crates/hir-def/src/lib.rs b/crates/hir-def/src/lib.rs index 35fb10e465e0f..7cf13a202e02c 100644 --- a/crates/hir-def/src/lib.rs +++ b/crates/hir-def/src/lib.rs @@ -7,7 +7,7 @@ //! Note that `hir_def` is a work in progress, so not all of the above is //! actually true. -#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)] +#![warn(rust_2018_idioms, unused_lifetimes)] #![cfg_attr(feature = "in-rust-tree", feature(rustc_private))] #[allow(unused)] diff --git a/crates/hir-expand/src/lib.rs b/crates/hir-expand/src/lib.rs index bc7b5fb211d1b..602babcc9928b 100644 --- a/crates/hir-expand/src/lib.rs +++ b/crates/hir-expand/src/lib.rs @@ -4,7 +4,7 @@ //! tree originates not from the text of some `FileId`, but from some macro //! expansion. -#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)] +#![warn(rust_2018_idioms, unused_lifetimes)] pub mod db; pub mod ast_id_map; diff --git a/crates/hir-ty/src/lib.rs b/crates/hir-ty/src/lib.rs index bcf7bfa0d2730..907a303019649 100644 --- a/crates/hir-ty/src/lib.rs +++ b/crates/hir-ty/src/lib.rs @@ -1,6 +1,6 @@ //! The type system. We currently use this to infer types for completion, hover //! information and various assists. -#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)] +#![warn(rust_2018_idioms, unused_lifetimes)] #![cfg_attr(feature = "in-rust-tree", feature(rustc_private))] #[allow(unused)] diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs index c95c038afc967..22e14b6181263 100644 --- a/crates/hir/src/lib.rs +++ b/crates/hir/src/lib.rs @@ -17,7 +17,7 @@ //! from the ide with completions, hovers, etc. It is a (soft, internal) boundary: //! . -#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)] +#![warn(rust_2018_idioms, unused_lifetimes)] #![cfg_attr(feature = "in-rust-tree", feature(rustc_private))] #![recursion_limit = "512"] diff --git a/crates/ide-assists/src/lib.rs b/crates/ide-assists/src/lib.rs index e6f03214ed30d..1e4d1c94f5bee 100644 --- a/crates/ide-assists/src/lib.rs +++ b/crates/ide-assists/src/lib.rs @@ -58,7 +58,7 @@ //! See also this post: //! -#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)] +#![warn(rust_2018_idioms, unused_lifetimes)] #[allow(unused)] macro_rules! 
eprintln { diff --git a/crates/ide-completion/src/lib.rs b/crates/ide-completion/src/lib.rs index aaf7cd7843afe..37a2828e8dc8f 100644 --- a/crates/ide-completion/src/lib.rs +++ b/crates/ide-completion/src/lib.rs @@ -1,6 +1,6 @@ //! `completions` crate provides utilities for generating completions of user input. -#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)] +#![warn(rust_2018_idioms, unused_lifetimes)] mod completions; mod config; diff --git a/crates/ide-db/src/lib.rs b/crates/ide-db/src/lib.rs index cbd51e67810c3..fefc05e535505 100644 --- a/crates/ide-db/src/lib.rs +++ b/crates/ide-db/src/lib.rs @@ -2,7 +2,7 @@ //! //! It is mainly a `HirDatabase` for semantic analysis, plus a `SymbolsDatabase`, for fuzzy search. -#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)] +#![warn(rust_2018_idioms, unused_lifetimes)] mod apply_change; diff --git a/crates/ide-diagnostics/src/lib.rs b/crates/ide-diagnostics/src/lib.rs index 6744895f3cd2e..f9fb921f4053a 100644 --- a/crates/ide-diagnostics/src/lib.rs +++ b/crates/ide-diagnostics/src/lib.rs @@ -23,7 +23,7 @@ //! There are also a couple of ad-hoc diagnostics implemented directly here, we //! don't yet have a great pattern for how to do them properly. -#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)] +#![warn(rust_2018_idioms, unused_lifetimes)] mod handlers { pub(crate) mod break_outside_of_loop; diff --git a/crates/ide-ssr/src/lib.rs b/crates/ide-ssr/src/lib.rs index 66832a0bee496..d756e7a63eb9b 100644 --- a/crates/ide-ssr/src/lib.rs +++ b/crates/ide-ssr/src/lib.rs @@ -3,7 +3,7 @@ //! Allows searching the AST for code that matches one or more patterns and then replacing that code //! based on a template. -#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)] +#![warn(rust_2018_idioms, unused_lifetimes)] // Feature: Structural Search and Replace // diff --git a/crates/ide/src/lib.rs b/crates/ide/src/lib.rs index 2320c95b4a1a3..3390331e0e8a9 100644 --- a/crates/ide/src/lib.rs +++ b/crates/ide/src/lib.rs @@ -8,7 +8,7 @@ //! in this crate. // For proving that RootDatabase is RefUnwindSafe. -#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)] +#![warn(rust_2018_idioms, unused_lifetimes)] #![cfg_attr(feature = "in-rust-tree", feature(rustc_private))] #![recursion_limit = "128"] diff --git a/crates/limit/src/lib.rs b/crates/limit/src/lib.rs index 7fb4b513a7157..7f4b00df0bac8 100644 --- a/crates/limit/src/lib.rs +++ b/crates/limit/src/lib.rs @@ -1,6 +1,6 @@ //! limit defines a struct to enforce limits. -#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)] +#![warn(rust_2018_idioms, unused_lifetimes)] #[cfg(feature = "tracking")] use std::sync::atomic::AtomicUsize; diff --git a/crates/mbe/src/lib.rs b/crates/mbe/src/lib.rs index 2b52e04b251e2..9331798589fcc 100644 --- a/crates/mbe/src/lib.rs +++ b/crates/mbe/src/lib.rs @@ -6,7 +6,7 @@ //! The tests for this functionality live in another crate: //! `hir_def::macro_expansion_tests::mbe`. -#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)] +#![warn(rust_2018_idioms, unused_lifetimes)] mod parser; mod expander; diff --git a/crates/parser/src/lib.rs b/crates/parser/src/lib.rs index fcfd1a50719bd..d9b3f46f2001c 100644 --- a/crates/parser/src/lib.rs +++ b/crates/parser/src/lib.rs @@ -17,7 +17,7 @@ //! //! 
[`Parser`]: crate::parser::Parser -#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)] +#![warn(rust_2018_idioms, unused_lifetimes)] #![allow(rustdoc::private_intra_doc_links)] #![cfg_attr(feature = "in-rust-tree", feature(rustc_private))] diff --git a/crates/paths/src/lib.rs b/crates/paths/src/lib.rs index 88b8d0aee3a49..db705a7b69ec5 100644 --- a/crates/paths/src/lib.rs +++ b/crates/paths/src/lib.rs @@ -1,7 +1,7 @@ //! Thin wrappers around `std::path`, distinguishing between absolute and //! relative paths. -#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)] +#![warn(rust_2018_idioms, unused_lifetimes)] use std::{ borrow::Borrow, diff --git a/crates/proc-macro-api/src/lib.rs b/crates/proc-macro-api/src/lib.rs index 9fc5a53d935f8..f697ecd3518f1 100644 --- a/crates/proc-macro-api/src/lib.rs +++ b/crates/proc-macro-api/src/lib.rs @@ -5,7 +5,7 @@ //! is used to provide basic infrastructure for communication between two //! processes: Client (RA itself), Server (the external program) -#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)] +#![warn(rust_2018_idioms, unused_lifetimes)] pub mod msg; mod process; diff --git a/crates/proc-macro-srv/src/lib.rs b/crates/proc-macro-srv/src/lib.rs index dd327681c4c02..790e7936cdc52 100644 --- a/crates/proc-macro-srv/src/lib.rs +++ b/crates/proc-macro-srv/src/lib.rs @@ -12,7 +12,7 @@ #![cfg(any(feature = "sysroot-abi", rust_analyzer))] #![feature(proc_macro_internals, proc_macro_diagnostic, proc_macro_span)] -#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)] +#![warn(rust_2018_idioms, unused_lifetimes)] #![allow(unreachable_pub)] extern crate proc_macro; diff --git a/crates/proc-macro-test/imp/src/lib.rs b/crates/proc-macro-test/imp/src/lib.rs index feeacdb6407af..32510fba2f8ca 100644 --- a/crates/proc-macro-test/imp/src/lib.rs +++ b/crates/proc-macro-test/imp/src/lib.rs @@ -1,6 +1,6 @@ //! Exports a few trivial procedural macros for testing. -#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)] +#![warn(rust_2018_idioms, unused_lifetimes)] use proc_macro::{Group, Ident, Literal, Punct, Span, TokenStream, TokenTree}; diff --git a/crates/proc-macro-test/src/lib.rs b/crates/proc-macro-test/src/lib.rs index 6d57bc81e0e33..739c6ec6f4494 100644 --- a/crates/proc-macro-test/src/lib.rs +++ b/crates/proc-macro-test/src/lib.rs @@ -1,6 +1,6 @@ //! Exports a few trivial procedural macros for testing. -#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)] +#![warn(rust_2018_idioms, unused_lifetimes)] pub static PROC_MACRO_TEST_LOCATION: &str = include_str!(concat!(env!("OUT_DIR"), "/proc_macro_test_location.txt")); diff --git a/crates/profile/src/lib.rs b/crates/profile/src/lib.rs index e7fc3d970bfff..fdd724e2aab45 100644 --- a/crates/profile/src/lib.rs +++ b/crates/profile/src/lib.rs @@ -1,6 +1,6 @@ //! A collection of tools for profiling rust-analyzer. -#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)] +#![warn(rust_2018_idioms, unused_lifetimes)] mod stop_watch; mod memory_usage; diff --git a/crates/project-model/src/lib.rs b/crates/project-model/src/lib.rs index 901dcfd2b1102..5f9b708289d16 100644 --- a/crates/project-model/src/lib.rs +++ b/crates/project-model/src/lib.rs @@ -15,7 +15,7 @@ //! procedural macros). //! 
* Lowering of concrete model to a [`base_db::CrateGraph`] -#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)] +#![warn(rust_2018_idioms, unused_lifetimes)] mod manifest_path; mod cargo_workspace; diff --git a/crates/rust-analyzer/src/bin/main.rs b/crates/rust-analyzer/src/bin/main.rs index a7d0a0b0dfc47..29bd02f92da70 100644 --- a/crates/rust-analyzer/src/bin/main.rs +++ b/crates/rust-analyzer/src/bin/main.rs @@ -2,7 +2,7 @@ //! //! Based on cli flags, either spawns an LSP server, or runs a batch analysis -#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)] +#![warn(rust_2018_idioms, unused_lifetimes)] #![cfg_attr(feature = "in-rust-tree", feature(rustc_private))] #[cfg(feature = "in-rust-tree")] #[allow(unused_extern_crates)] diff --git a/crates/rust-analyzer/src/lib.rs b/crates/rust-analyzer/src/lib.rs index 6c62577f696e2..29bc0b80d8a1d 100644 --- a/crates/rust-analyzer/src/lib.rs +++ b/crates/rust-analyzer/src/lib.rs @@ -9,7 +9,7 @@ //! The `cli` submodule implements some batch-processing analysis, primarily as //! a debugging aid. -#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)] +#![warn(rust_2018_idioms, unused_lifetimes)] pub mod cli; diff --git a/crates/rust-analyzer/tests/slow-tests/main.rs b/crates/rust-analyzer/tests/slow-tests/main.rs index 5cd02f7840460..ec8e5c6dd9681 100644 --- a/crates/rust-analyzer/tests/slow-tests/main.rs +++ b/crates/rust-analyzer/tests/slow-tests/main.rs @@ -8,7 +8,7 @@ //! specific JSON shapes here -- there's little value in such tests, as we can't //! be sure without a real client anyway. -#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)] +#![warn(rust_2018_idioms, unused_lifetimes)] #[cfg(not(feature = "in-rust-tree"))] mod sourcegen; diff --git a/crates/sourcegen/src/lib.rs b/crates/sourcegen/src/lib.rs index 1514c6c7d4c18..18fa77fd9743f 100644 --- a/crates/sourcegen/src/lib.rs +++ b/crates/sourcegen/src/lib.rs @@ -6,7 +6,7 @@ //! //! This crate contains utilities to make this kind of source-gen easy. -#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)] +#![warn(rust_2018_idioms, unused_lifetimes)] use std::{ fmt, fs, mem, diff --git a/crates/stdx/src/lib.rs b/crates/stdx/src/lib.rs index af7846d49c64d..71e269f74bc76 100644 --- a/crates/stdx/src/lib.rs +++ b/crates/stdx/src/lib.rs @@ -1,6 +1,6 @@ //! Missing batteries for standard libraries. -#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)] +#![warn(rust_2018_idioms, unused_lifetimes)] use std::io as sio; use std::process::Command; diff --git a/crates/syntax/src/lib.rs b/crates/syntax/src/lib.rs index 1d7b7de390e4b..d600698040dc7 100644 --- a/crates/syntax/src/lib.rs +++ b/crates/syntax/src/lib.rs @@ -20,7 +20,7 @@ //! [Swift]: #![cfg_attr(feature = "in-rust-tree", feature(rustc_private))] -#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)] +#![warn(rust_2018_idioms, unused_lifetimes)] #[allow(unused)] macro_rules! eprintln { diff --git a/crates/test-utils/src/lib.rs b/crates/test-utils/src/lib.rs index fd3e68e2d2c63..e48b27313068c 100644 --- a/crates/test-utils/src/lib.rs +++ b/crates/test-utils/src/lib.rs @@ -6,7 +6,7 @@ //! * Extracting markup (mainly, `$0` markers) out of fixture strings. //! * marks (see the eponymous module). 
-#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)] +#![warn(rust_2018_idioms, unused_lifetimes)] mod assert_linear; pub mod bench_fixture; diff --git a/crates/text-edit/src/lib.rs b/crates/text-edit/src/lib.rs index 4705d18187af8..fb52a50f0b3f1 100644 --- a/crates/text-edit/src/lib.rs +++ b/crates/text-edit/src/lib.rs @@ -4,7 +4,7 @@ //! so `TextEdit` is the ultimate representation of the work done by //! rust-analyzer. -#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)] +#![warn(rust_2018_idioms, unused_lifetimes)] use itertools::Itertools; use std::cmp::max; diff --git a/crates/toolchain/src/lib.rs b/crates/toolchain/src/lib.rs index 729f84a8150c5..997f339edc4d7 100644 --- a/crates/toolchain/src/lib.rs +++ b/crates/toolchain/src/lib.rs @@ -1,6 +1,6 @@ //! Discovery of `cargo` & `rustc` executables. -#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)] +#![warn(rust_2018_idioms, unused_lifetimes)] use std::{env, iter, path::PathBuf}; diff --git a/crates/tt/src/lib.rs b/crates/tt/src/lib.rs index 10e05862189f6..481d575403aca 100644 --- a/crates/tt/src/lib.rs +++ b/crates/tt/src/lib.rs @@ -2,7 +2,7 @@ //! input and output) of macros. It closely mirrors `proc_macro` crate's //! `TokenTree`. -#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)] +#![warn(rust_2018_idioms, unused_lifetimes)] use std::fmt; diff --git a/crates/vfs-notify/src/lib.rs b/crates/vfs-notify/src/lib.rs index abfc51dfec66f..0306504371465 100644 --- a/crates/vfs-notify/src/lib.rs +++ b/crates/vfs-notify/src/lib.rs @@ -7,7 +7,7 @@ //! Hopefully, one day a reliable file watching/walking crate appears on //! crates.io, and we can reduce this to trivial glue code. -#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)] +#![warn(rust_2018_idioms, unused_lifetimes)] use std::fs; diff --git a/crates/vfs/src/lib.rs b/crates/vfs/src/lib.rs index 128559727bdcd..7360f68c735e7 100644 --- a/crates/vfs/src/lib.rs +++ b/crates/vfs/src/lib.rs @@ -38,7 +38,7 @@ //! [`Handle`]: loader::Handle //! [`Entries`]: loader::Entry -#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)] +#![warn(rust_2018_idioms, unused_lifetimes)] mod anchored_path; pub mod file_set; diff --git a/lib/la-arena/src/lib.rs b/lib/la-arena/src/lib.rs index f39c3a3e4ca12..d195bdd156bea 100644 --- a/lib/la-arena/src/lib.rs +++ b/lib/la-arena/src/lib.rs @@ -1,6 +1,6 @@ //! Yet another index-based arena. -#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)] +#![warn(rust_2018_idioms, unused_lifetimes)] #![warn(missing_docs)] use std::{ diff --git a/lib/lsp-server/src/lib.rs b/lib/lsp-server/src/lib.rs index b190c0af73d73..2797a6b60de46 100644 --- a/lib/lsp-server/src/lib.rs +++ b/lib/lsp-server/src/lib.rs @@ -4,7 +4,7 @@ //! //! Run with `RUST_LOG=lsp_server=debug` to see all the messages. -#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)] +#![warn(rust_2018_idioms, unused_lifetimes)] mod msg; mod stdio; diff --git a/xtask/src/main.rs b/xtask/src/main.rs index 6a45033ada3ba..49f8ae79baf0e 100644 --- a/xtask/src/main.rs +++ b/xtask/src/main.rs @@ -8,7 +8,7 @@ //! This binary is integrated into the `cargo` command line by using an alias in //! `.cargo/config`. 
-#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)] +#![warn(rust_2018_idioms, unused_lifetimes)] mod flags; From 9dc38214c01efe22b42e7b3d1b0c6d96d6227a2c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lauren=C8=9Biu=20Nicola?= Date: Tue, 5 Dec 2023 13:07:52 +0200 Subject: [PATCH 34/80] Fix runnable cwd on Windows --- editors/code/src/toolchain.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/editors/code/src/toolchain.ts b/editors/code/src/toolchain.ts index 1037e513aa106..a0b34406c1b08 100644 --- a/editors/code/src/toolchain.ts +++ b/editors/code/src/toolchain.ts @@ -74,7 +74,7 @@ export class Cargo { artifacts.push({ fileName: message.executable, name: message.target.name, - workspace: message.manifest_path.replace(/\/Cargo\.toml$/, ""), + workspace: path.dirname(message.manifest_path), kind: message.target.kind[0], isTest: message.profile.test, }); From 1834b38dbe8e3e2b5b6ca91fb61901ef8a64c3e3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lauren=C8=9Biu=20Nicola?= Date: Tue, 5 Dec 2023 13:38:35 +0200 Subject: [PATCH 35/80] Bump ra-ap-rustc_lexer --- Cargo.lock | 14 ++++++++++++-- crates/rustc-dependencies/Cargo.toml | 2 +- 2 files changed, 13 insertions(+), 3 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index e51017c6ab6b9..8736bf6796c7c 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1448,6 +1448,16 @@ dependencies = [ "unicode-xid", ] +[[package]] +name = "ra-ap-rustc_lexer" +version = "0.21.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dc741c7a78103efab416b562e35bd73c8d4967478575010c86c6062f8d3cbf29" +dependencies = [ + "unicode-properties", + "unicode-xid", +] + [[package]] name = "ra-ap-rustc_parse_format" version = "0.20.0" @@ -1455,7 +1465,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0a6b325ee1ec90e4dbd4394913adf4ef32e4fcf2b311ec9563a0fa50cd549af6" dependencies = [ "ra-ap-rustc_index", - "ra-ap-rustc_lexer", + "ra-ap-rustc_lexer 0.20.0", ] [[package]] @@ -1607,7 +1617,7 @@ version = "0.0.0" dependencies = [ "ra-ap-rustc_abi", "ra-ap-rustc_index", - "ra-ap-rustc_lexer", + "ra-ap-rustc_lexer 0.21.0", "ra-ap-rustc_parse_format", ] diff --git a/crates/rustc-dependencies/Cargo.toml b/crates/rustc-dependencies/Cargo.toml index cd7ec30593656..af86170efdd94 100644 --- a/crates/rustc-dependencies/Cargo.toml +++ b/crates/rustc-dependencies/Cargo.toml @@ -11,7 +11,7 @@ authors.workspace = true # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] -ra-ap-rustc_lexer = { version = "0.20.0" } +ra-ap-rustc_lexer = { version = "0.21.0" } ra-ap-rustc_parse_format = { version = "0.20.0", default-features = false } ra-ap-rustc_index = { version = "0.20.0", default-features = false } ra-ap-rustc_abi = { version = "0.20.0", default-features = false } From a9b037f51028ef8a51850ef3af950a9e13c7b56f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lauren=C8=9Biu=20Nicola?= Date: Tue, 5 Dec 2023 13:39:17 +0200 Subject: [PATCH 36/80] Bump ra-ap-rustc_parse_format --- Cargo.lock | 39 ++++++++++++++++++---------- crates/rustc-dependencies/Cargo.toml | 2 +- 2 files changed, 27 insertions(+), 14 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 8736bf6796c7c..14fe22b74edd3 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1411,7 +1411,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b5f38444d48da534b3bb612713fce9b0aeeffb2e0dfa242764f55482acc5b52d" dependencies = [ "bitflags 1.3.2", - "ra-ap-rustc_index", + 
"ra-ap-rustc_index 0.20.0", "tracing", ] @@ -1422,7 +1422,18 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "69fb5da07e1a39222d9c311203123c3b6a86420fa06dc695aa1661b0aecf8d16" dependencies = [ "arrayvec", - "ra-ap-rustc_index_macros", + "ra-ap-rustc_index_macros 0.20.0", + "smallvec", +] + +[[package]] +name = "ra-ap-rustc_index" +version = "0.21.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8352918d61aa4afab9f2ed7314cf638976b20949b3d61d2f468c975b0d251f24" +dependencies = [ + "arrayvec", + "ra-ap-rustc_index_macros 0.21.0", "smallvec", ] @@ -1439,13 +1450,15 @@ dependencies = [ ] [[package]] -name = "ra-ap-rustc_lexer" -version = "0.20.0" +name = "ra-ap-rustc_index_macros" +version = "0.21.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9d5e8650195795c4023d8321846466994a975bc457cb8a91c0b3b17a5fc8ba40" +checksum = "66a9424018828155a3e3596515598f90e68427d8f35eff6df7f0856c73fc58a8" dependencies = [ - "unicode-properties", - "unicode-xid", + "proc-macro2", + "quote", + "syn", + "synstructure", ] [[package]] @@ -1460,12 +1473,12 @@ dependencies = [ [[package]] name = "ra-ap-rustc_parse_format" -version = "0.20.0" +version = "0.21.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0a6b325ee1ec90e4dbd4394913adf4ef32e4fcf2b311ec9563a0fa50cd549af6" +checksum = "d557201d71792487bd2bab637ab5be9aa6fff59b88e25e12de180b0f9d2df60f" dependencies = [ - "ra-ap-rustc_index", - "ra-ap-rustc_lexer 0.20.0", + "ra-ap-rustc_index 0.21.0", + "ra-ap-rustc_lexer", ] [[package]] @@ -1616,8 +1629,8 @@ name = "rustc-dependencies" version = "0.0.0" dependencies = [ "ra-ap-rustc_abi", - "ra-ap-rustc_index", - "ra-ap-rustc_lexer 0.21.0", + "ra-ap-rustc_index 0.20.0", + "ra-ap-rustc_lexer", "ra-ap-rustc_parse_format", ] diff --git a/crates/rustc-dependencies/Cargo.toml b/crates/rustc-dependencies/Cargo.toml index af86170efdd94..3e260b175b6be 100644 --- a/crates/rustc-dependencies/Cargo.toml +++ b/crates/rustc-dependencies/Cargo.toml @@ -12,7 +12,7 @@ authors.workspace = true [dependencies] ra-ap-rustc_lexer = { version = "0.21.0" } -ra-ap-rustc_parse_format = { version = "0.20.0", default-features = false } +ra-ap-rustc_parse_format = { version = "0.21.0", default-features = false } ra-ap-rustc_index = { version = "0.20.0", default-features = false } ra-ap-rustc_abi = { version = "0.20.0", default-features = false } From 22676ce94644f34b20da87fee446a8c9d2832952 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lauren=C8=9Biu=20Nicola?= Date: Tue, 5 Dec 2023 13:39:45 +0200 Subject: [PATCH 37/80] Revert "Temporarily revert delay_bug to delayed_bug change" This reverts commit 6d2543b62256676718f1936a182f92b10f2ca8ac. 
--- crates/hir-ty/src/layout.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/hir-ty/src/layout.rs b/crates/hir-ty/src/layout.rs index b2591f016d429..27c7949986878 100644 --- a/crates/hir-ty/src/layout.rs +++ b/crates/hir-ty/src/layout.rs @@ -110,7 +110,7 @@ struct LayoutCx<'a> { impl<'a> LayoutCalculator for LayoutCx<'a> { type TargetDataLayoutRef = &'a TargetDataLayout; - fn delay_bug(&self, txt: String) { + fn delayed_bug(&self, txt: String) { never!("{}", txt); } From 73b9f885f653cea6720e02cd5c6be212001eda76 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lauren=C8=9Biu=20Nicola?= Date: Tue, 5 Dec 2023 13:40:29 +0200 Subject: [PATCH 38/80] Bump ra-ap-rustc_index and ra-ap-rustc_abi --- Cargo.lock | 35 +++++----------------------- crates/rustc-dependencies/Cargo.toml | 4 ++-- 2 files changed, 8 insertions(+), 31 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 14fe22b74edd3..5ba5c15a1d3d4 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1406,26 +1406,15 @@ dependencies = [ [[package]] name = "ra-ap-rustc_abi" -version = "0.20.0" +version = "0.21.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b5f38444d48da534b3bb612713fce9b0aeeffb2e0dfa242764f55482acc5b52d" +checksum = "7816f980fab89e878ff2e916e2077d484e3aa1c619a3cc982c8a417c3dfe45fa" dependencies = [ "bitflags 1.3.2", - "ra-ap-rustc_index 0.20.0", + "ra-ap-rustc_index", "tracing", ] -[[package]] -name = "ra-ap-rustc_index" -version = "0.20.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "69fb5da07e1a39222d9c311203123c3b6a86420fa06dc695aa1661b0aecf8d16" -dependencies = [ - "arrayvec", - "ra-ap-rustc_index_macros 0.20.0", - "smallvec", -] - [[package]] name = "ra-ap-rustc_index" version = "0.21.0" @@ -1433,22 +1422,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8352918d61aa4afab9f2ed7314cf638976b20949b3d61d2f468c975b0d251f24" dependencies = [ "arrayvec", - "ra-ap-rustc_index_macros 0.21.0", + "ra-ap-rustc_index_macros", "smallvec", ] -[[package]] -name = "ra-ap-rustc_index_macros" -version = "0.20.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3d69f9f6af58124f2da0cb8b0c3d8494e0d883a5fe0c6732258bde81ac5a87cc" -dependencies = [ - "proc-macro2", - "quote", - "syn", - "synstructure", -] - [[package]] name = "ra-ap-rustc_index_macros" version = "0.21.0" @@ -1477,7 +1454,7 @@ version = "0.21.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d557201d71792487bd2bab637ab5be9aa6fff59b88e25e12de180b0f9d2df60f" dependencies = [ - "ra-ap-rustc_index 0.21.0", + "ra-ap-rustc_index", "ra-ap-rustc_lexer", ] @@ -1629,7 +1606,7 @@ name = "rustc-dependencies" version = "0.0.0" dependencies = [ "ra-ap-rustc_abi", - "ra-ap-rustc_index 0.20.0", + "ra-ap-rustc_index", "ra-ap-rustc_lexer", "ra-ap-rustc_parse_format", ] diff --git a/crates/rustc-dependencies/Cargo.toml b/crates/rustc-dependencies/Cargo.toml index 3e260b175b6be..1b3b6ec735e75 100644 --- a/crates/rustc-dependencies/Cargo.toml +++ b/crates/rustc-dependencies/Cargo.toml @@ -13,8 +13,8 @@ authors.workspace = true [dependencies] ra-ap-rustc_lexer = { version = "0.21.0" } ra-ap-rustc_parse_format = { version = "0.21.0", default-features = false } -ra-ap-rustc_index = { version = "0.20.0", default-features = false } -ra-ap-rustc_abi = { version = "0.20.0", default-features = false } +ra-ap-rustc_index = { version = "0.21.0", default-features = false } +ra-ap-rustc_abi = { version = "0.21.0", default-features = false } 
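+# NB: the ra-ap-rustc_* crates are cut from a single rustc snapshot, so their
+# versions are kept in lock-step (0.21.0 across the board after these bumps).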
[features] in-rust-tree = [] From afc4075c7cf77c19195b18648718cf067bd963b2 Mon Sep 17 00:00:00 2001 From: Young-Flash <871946895@qq.com> Date: Tue, 5 Dec 2023 22:56:51 +0800 Subject: [PATCH 39/80] fix: make drop inlay hint more readable --- crates/ide/src/inlay_hints/implicit_drop.rs | 22 +++++++++++++++++---- 1 file changed, 18 insertions(+), 4 deletions(-) diff --git a/crates/ide/src/inlay_hints/implicit_drop.rs b/crates/ide/src/inlay_hints/implicit_drop.rs index 60f1f3496f6cd..9cbaed090dc78 100644 --- a/crates/ide/src/inlay_hints/implicit_drop.rs +++ b/crates/ide/src/inlay_hints/implicit_drop.rs @@ -62,7 +62,11 @@ pub(super) fn hints( match_ast! { match expr { ast::BlockExpr(x) => x.stmt_list().and_then(|x| x.r_curly_token()).map(|x| x.text_range()).unwrap_or_else(|| expr.text_range()), - _ => expr.text_range(), + // make the inlay hint appear after the semicolon if there is + _ => { + let nearest_semicolon = nearest_token_after_node(expr, syntax::SyntaxKind::SEMICOLON); + nearest_semicolon.map(|x| x.text_range()).unwrap_or_else(|| expr.text_range()) + }, } } } @@ -95,7 +99,7 @@ pub(super) fn hints( label.append_str(")"); acc.push(InlayHint { range, - position: InlayHintPosition::Before, + position: InlayHintPosition::After, pad_left: true, pad_right: true, kind: InlayKind::Drop, @@ -109,6 +113,16 @@ pub(super) fn hints( Some(()) } +fn nearest_token_after_node( + node: &syntax::SyntaxNode, + token_type: syntax::SyntaxKind, +) -> Option { + node.siblings_with_tokens(syntax::Direction::Next) + .filter_map(|it| it.as_token().map(|it| it.clone())) + .filter(|it| it.kind() == token_type) + .next() +} + #[cfg(test)] mod tests { use crate::{ @@ -129,7 +143,7 @@ mod tests { let x = X; if 2 == 5 { return; - //^^^^^^ drop(x) + //^ drop(x) } } //^ drop(x) @@ -176,7 +190,7 @@ mod tests { let x = X; let t_opt = Some(2); let t = t_opt?; - //^^^^^^ drop(x) + //^ drop(x) Some(()) } //^ drop(x) From 5b8e386baececd1501443cb043ad43e032c0cbdb Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Sun, 3 Dec 2023 20:20:38 +0100 Subject: [PATCH 40/80] Improve macro descension API --- crates/hir-expand/src/lib.rs | 2 - crates/hir/src/lib.rs | 4 +- crates/hir/src/semantics.rs | 176 ++++++++++-------- .../extract_expressions_from_format_string.rs | 9 +- .../src/handlers/extract_function.rs | 8 +- crates/ide-db/src/helpers.rs | 4 +- crates/ide-db/src/search.rs | 8 +- crates/ide/src/call_hierarchy.rs | 4 +- crates/ide/src/doc_links.rs | 8 +- crates/ide/src/expand_macro.rs | 8 +- crates/ide/src/extend_selection.rs | 19 +- crates/ide/src/goto_declaration.rs | 4 +- crates/ide/src/goto_definition.rs | 4 +- crates/ide/src/goto_implementation.rs | 4 +- crates/ide/src/goto_type_definition.rs | 3 +- crates/ide/src/highlight_related.rs | 4 +- crates/ide/src/hover.rs | 12 +- crates/ide/src/moniker.rs | 4 +- crates/ide/src/references.rs | 4 +- crates/ide/src/signature_help.rs | 7 +- crates/ide/src/syntax_highlighting.rs | 18 +- 21 files changed, 177 insertions(+), 137 deletions(-) diff --git a/crates/hir-expand/src/lib.rs b/crates/hir-expand/src/lib.rs index 602babcc9928b..71c98b2770aa3 100644 --- a/crates/hir-expand/src/lib.rs +++ b/crates/hir-expand/src/lib.rs @@ -629,8 +629,6 @@ impl ExpansionInfo { pub fn map_range_down<'a>( &'a self, span: SpanData, - // FIXME: use this for range mapping, so that we can resolve inline format args - _relative_token_offset: Option, ) -> Option> + 'a> { let tokens = self .exp_map diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs index 22e14b6181263..53e60c5862ad6 100644 --- 
a/crates/hir/src/lib.rs +++ b/crates/hir/src/lib.rs @@ -92,7 +92,9 @@ pub use crate::{ attrs::{resolve_doc_path_on, HasAttrs}, diagnostics::*, has_source::HasSource, - semantics::{PathResolution, Semantics, SemanticsScope, TypeInfo, VisibleTraits}, + semantics::{ + DescendPreference, PathResolution, Semantics, SemanticsScope, TypeInfo, VisibleTraits, + }, }; // Be careful with these re-exports. diff --git a/crates/hir/src/semantics.rs b/crates/hir/src/semantics.rs index ed3d3f1a3b694..7d3c89ddb61f7 100644 --- a/crates/hir/src/semantics.rs +++ b/crates/hir/src/semantics.rs @@ -2,7 +2,11 @@ mod source_to_def; -use std::{cell::RefCell, fmt, iter, mem, ops}; +use std::{ + cell::RefCell, + fmt, iter, mem, + ops::{self, ControlFlow}, +}; use base_db::{FileId, FileRange}; use either::Either; @@ -39,6 +43,12 @@ use crate::{ TypeAlias, TypeParam, VariantDef, }; +pub enum DescendPreference { + SameText, + SameKind, + None, +} + #[derive(Debug, Clone, PartialEq, Eq)] pub enum PathResolution { /// An item @@ -397,6 +407,7 @@ impl<'db> SemanticsImpl<'db> { // This might not be the correct way to do this, but it works for now let mut res = smallvec![]; let tokens = (|| { + // FIXME: the trivia skipping should not be necessary let first = skip_trivia_token(node.syntax().first_token()?, Direction::Next)?; let last = skip_trivia_token(node.syntax().last_token()?, Direction::Prev)?; Some((first, last)) @@ -407,18 +418,19 @@ impl<'db> SemanticsImpl<'db> { }; if first == last { + // node is just the token, so descend the token self.descend_into_macros_impl(first, 0.into(), &mut |InFile { value, .. }| { if let Some(node) = value.parent_ancestors().find_map(N::cast) { res.push(node) } - false + ControlFlow::Continue(()) }); } else { // Descend first and last token, then zip them to look for the node they belong to let mut scratch: SmallVec<[_; 1]> = smallvec![]; self.descend_into_macros_impl(first, 0.into(), &mut |token| { scratch.push(token); - false + ControlFlow::Continue(()) }); let mut scratch = scratch.into_iter(); @@ -441,7 +453,7 @@ impl<'db> SemanticsImpl<'db> { } } } - false + ControlFlow::Continue(()) }, ); } @@ -453,32 +465,43 @@ impl<'db> SemanticsImpl<'db> { /// be considered for the mapping in case of inline format args. pub fn descend_into_macros( &self, + mode: DescendPreference, token: SyntaxToken, offset: TextSize, ) -> SmallVec<[SyntaxToken; 1]> { - let mut res = smallvec![]; - self.descend_into_macros_impl(token, offset, &mut |InFile { value, .. }| { - res.push(value); - false - }); - res - } - - /// Descend the token into macrocalls to all its mapped counterparts that have the same text as the input token. - /// - /// Returns the original non descended token if none of the mapped counterparts have the same text. - pub fn descend_into_macros_with_same_text( - &self, - token: SyntaxToken, - offset: TextSize, - ) -> SmallVec<[SyntaxToken; 1]> { - let text = token.text(); + enum Dp<'t> { + SameText(&'t str), + SameKind(SyntaxKind), + None, + } + let fetch_kind = |token: &SyntaxToken| match token.parent() { + Some(node) => match node.kind() { + kind @ (SyntaxKind::NAME | SyntaxKind::NAME_REF) => kind, + _ => token.kind(), + }, + None => token.kind(), + }; + let mode = match mode { + DescendPreference::SameText => Dp::SameText(token.text()), + DescendPreference::SameKind => Dp::SameKind(fetch_kind(&token)), + DescendPreference::None => Dp::None, + }; let mut res = smallvec![]; self.descend_into_macros_impl(token.clone(), offset, &mut |InFile { value, .. 
}| { - if value.text() == text { + let is_a_match = match mode { + Dp::SameText(text) => value.text() == text, + Dp::SameKind(preferred_kind) => { + let kind = fetch_kind(&value); + kind == preferred_kind + // special case for derive macros + || (preferred_kind == SyntaxKind::IDENT && kind == SyntaxKind::NAME_REF) + } + Dp::None => true, + }; + if is_a_match { res.push(value); } - false + ControlFlow::Continue(()) }); if res.is_empty() { res.push(token); @@ -486,45 +509,48 @@ impl<'db> SemanticsImpl<'db> { res } - pub fn descend_into_macros_with_kind_preference( + pub fn descend_into_macros_single( &self, + mode: DescendPreference, token: SyntaxToken, offset: TextSize, ) -> SyntaxToken { + enum Dp<'t> { + SameText(&'t str), + SameKind(SyntaxKind), + None, + } let fetch_kind = |token: &SyntaxToken| match token.parent() { Some(node) => match node.kind() { - kind @ (SyntaxKind::NAME | SyntaxKind::NAME_REF) => { - node.parent().map_or(kind, |it| it.kind()) - } + kind @ (SyntaxKind::NAME | SyntaxKind::NAME_REF) => kind, _ => token.kind(), }, None => token.kind(), }; - let preferred_kind = fetch_kind(&token); - let mut res = None; + let mode = match mode { + DescendPreference::SameText => Dp::SameText(token.text()), + DescendPreference::SameKind => Dp::SameKind(fetch_kind(&token)), + DescendPreference::None => Dp::None, + }; + let mut res = token.clone(); self.descend_into_macros_impl(token.clone(), offset, &mut |InFile { value, .. }| { - if fetch_kind(&value) == preferred_kind { - res = Some(value); - true - } else { - if let None = res { - res = Some(value) + let is_a_match = match mode { + Dp::SameText(text) => value.text() == text, + Dp::SameKind(preferred_kind) => { + let kind = fetch_kind(&value); + kind == preferred_kind + // special case for derive macros + || (preferred_kind == SyntaxKind::IDENT && kind == SyntaxKind::NAME_REF) } - false + Dp::None => true, + }; + if is_a_match { + res = value; + ControlFlow::Break(()) + } else { + ControlFlow::Continue(()) } }); - res.unwrap_or(token) - } - - /// Descend the token into its macro call if it is part of one, returning the token in the - /// expansion that it is associated with. If `offset` points into the token's range, it will - /// be considered for the mapping in case of inline format args. - pub fn descend_into_macros_single(&self, token: SyntaxToken, offset: TextSize) -> SyntaxToken { - let mut res = token.clone(); - self.descend_into_macros_impl(token, offset, &mut |InFile { value, .. 
}| { - res = value; - true - }); res } @@ -535,7 +561,7 @@ impl<'db> SemanticsImpl<'db> { // FIXME: We might want this to be Option to be able to opt out of subrange // mapping, specifically for node downmapping _offset: TextSize, - f: &mut dyn FnMut(InFile) -> bool, + f: &mut dyn FnMut(InFile) -> ControlFlow<()>, ) { // FIXME: Clean this up let _p = profile::span("descend_into_macros"); @@ -560,25 +586,24 @@ impl<'db> SemanticsImpl<'db> { let def_map = sa.resolver.def_map(); let mut stack: SmallVec<[_; 4]> = smallvec![InFile::new(sa.file_id, token)]; - let mut process_expansion_for_token = - |stack: &mut SmallVec<_>, macro_file, _token: InFile<&_>| { - let expansion_info = cache - .entry(macro_file) - .or_insert_with(|| macro_file.expansion_info(self.db.upcast())); + let mut process_expansion_for_token = |stack: &mut SmallVec<_>, macro_file| { + let expansion_info = cache + .entry(macro_file) + .or_insert_with(|| macro_file.expansion_info(self.db.upcast())); - { - let InFile { file_id, value } = expansion_info.expanded(); - self.cache(value, file_id); - } + { + let InFile { file_id, value } = expansion_info.expanded(); + self.cache(value, file_id); + } - let mapped_tokens = expansion_info.map_range_down(span, None)?; - let len = stack.len(); + let mapped_tokens = expansion_info.map_range_down(span)?; + let len = stack.len(); - // requeue the tokens we got from mapping our current token down - stack.extend(mapped_tokens.map(Into::into)); - // if the length changed we have found a mapping for the token - (stack.len() != len).then_some(()) - }; + // requeue the tokens we got from mapping our current token down + stack.extend(mapped_tokens.map(Into::into)); + // if the length changed we have found a mapping for the token + (stack.len() != len).then_some(()) + }; // Remap the next token in the queue into a macro call its in, if it is not being remapped // either due to not being in a macro-call or because its unused push it into the result vec, @@ -598,7 +623,7 @@ impl<'db> SemanticsImpl<'db> { }); if let Some(call_id) = containing_attribute_macro_call { let file_id = call_id.as_macro_file(); - return process_expansion_for_token(&mut stack, file_id, token.as_ref()); + return process_expansion_for_token(&mut stack, file_id); } // Then check for token trees, that means we are either in a function-like macro or @@ -624,7 +649,7 @@ impl<'db> SemanticsImpl<'db> { it } }; - process_expansion_for_token(&mut stack, file_id, token.as_ref()) + process_expansion_for_token(&mut stack, file_id) } else if let Some(meta) = ast::Meta::cast(parent) { // attribute we failed expansion for earlier, this might be a derive invocation // or derive helper attribute @@ -646,11 +671,7 @@ impl<'db> SemanticsImpl<'db> { Some(call_id) => { // resolved to a derive let file_id = call_id.as_macro_file(); - return process_expansion_for_token( - &mut stack, - file_id, - token.as_ref(), - ); + return process_expansion_for_token(&mut stack, file_id); } None => Some(adt), } @@ -682,11 +703,8 @@ impl<'db> SemanticsImpl<'db> { def_map.derive_helpers_in_scope(InFile::new(token.file_id, id))?; let mut res = None; for (.., derive) in helpers.iter().filter(|(helper, ..)| *helper == attr_name) { - res = res.or(process_expansion_for_token( - &mut stack, - derive.as_macro_file(), - token.as_ref(), - )); + res = + res.or(process_expansion_for_token(&mut stack, derive.as_macro_file())); } res } else { @@ -695,7 +713,7 @@ impl<'db> SemanticsImpl<'db> { })() .is_none(); - if was_not_remapped && f(token) { + if was_not_remapped && 
f(token).is_break() { break; } } @@ -711,7 +729,7 @@ impl<'db> SemanticsImpl<'db> { offset: TextSize, ) -> impl Iterator + '_> + '_ { node.token_at_offset(offset) - .map(move |token| self.descend_into_macros(token, offset)) + .map(move |token| self.descend_into_macros(DescendPreference::None, token, offset)) .map(|descendants| { descendants.into_iter().map(move |it| self.token_ancestors_with_macros(it)) }) diff --git a/crates/ide-assists/src/handlers/extract_expressions_from_format_string.rs b/crates/ide-assists/src/handlers/extract_expressions_from_format_string.rs index 31a1ff496e133..55f2fd9f6ce8f 100644 --- a/crates/ide-assists/src/handlers/extract_expressions_from_format_string.rs +++ b/crates/ide-assists/src/handlers/extract_expressions_from_format_string.rs @@ -1,4 +1,5 @@ use crate::{AssistContext, Assists}; +use hir::DescendPreference; use ide_db::{ assists::{AssistId, AssistKind}, syntax_helpers::{ @@ -34,9 +35,11 @@ pub(crate) fn extract_expressions_from_format_string( let fmt_string = ctx.find_token_at_offset::()?; let tt = fmt_string.syntax().parent().and_then(ast::TokenTree::cast)?; - let expanded_t = ast::String::cast( - ctx.sema.descend_into_macros_with_kind_preference(fmt_string.syntax().clone(), 0.into()), - )?; + let expanded_t = ast::String::cast(ctx.sema.descend_into_macros_single( + DescendPreference::SameKind, + fmt_string.syntax().clone(), + 0.into(), + ))?; if !is_format_string(&expanded_t) { return None; } diff --git a/crates/ide-assists/src/handlers/extract_function.rs b/crates/ide-assists/src/handlers/extract_function.rs index 6b48d15881523..9b892ac1e9817 100644 --- a/crates/ide-assists/src/handlers/extract_function.rs +++ b/crates/ide-assists/src/handlers/extract_function.rs @@ -3,8 +3,8 @@ use std::iter; use ast::make; use either::Either; use hir::{ - HasSource, HirDisplay, InFile, Local, LocalSource, ModuleDef, PathResolution, Semantics, - TypeInfo, TypeParam, + DescendPreference, HasSource, HirDisplay, InFile, Local, LocalSource, ModuleDef, + PathResolution, Semantics, TypeInfo, TypeParam, }; use ide_db::{ defs::{Definition, NameRefClass}, @@ -751,7 +751,9 @@ impl FunctionBody { .descendants_with_tokens() .filter_map(SyntaxElement::into_token) .filter(|it| matches!(it.kind(), SyntaxKind::IDENT | T![self])) - .flat_map(|t| sema.descend_into_macros(t, 0.into())) + .flat_map(|t| { + sema.descend_into_macros(DescendPreference::None, t, 0.into()) + }) .for_each(|t| add_name_if_local(t.parent().and_then(ast::NameRef::cast))); } } diff --git a/crates/ide-db/src/helpers.rs b/crates/ide-db/src/helpers.rs index 330af442f754d..d4b031879d11a 100644 --- a/crates/ide-db/src/helpers.rs +++ b/crates/ide-db/src/helpers.rs @@ -3,7 +3,7 @@ use std::collections::VecDeque; use base_db::{FileId, SourceDatabaseExt}; -use hir::{Crate, ItemInNs, ModuleDef, Name, Semantics}; +use hir::{Crate, DescendPreference, ItemInNs, ModuleDef, Name, Semantics}; use syntax::{ ast::{self, make}, AstToken, SyntaxKind, SyntaxToken, TokenAtOffset, @@ -117,7 +117,7 @@ pub fn get_definition( sema: &Semantics<'_, RootDatabase>, token: SyntaxToken, ) -> Option { - for token in sema.descend_into_macros(token, 0.into()) { + for token in sema.descend_into_macros(DescendPreference::None, token, 0.into()) { let def = IdentClass::classify_token(sema, &token).map(IdentClass::definitions_no_ops); if let Some(&[x]) = def.as_deref() { return Some(x); diff --git a/crates/ide-db/src/search.rs b/crates/ide-db/src/search.rs index 2ce036c044ad5..c1ed17503fe99 100644 --- a/crates/ide-db/src/search.rs +++ 
b/crates/ide-db/src/search.rs @@ -8,8 +8,8 @@ use std::mem; use base_db::{salsa::Database, FileId, FileRange, SourceDatabase, SourceDatabaseExt}; use hir::{ - AsAssocItem, DefWithBody, HasAttrs, HasSource, HirFileIdExt, InFile, InRealFile, ModuleSource, - Semantics, Visibility, + AsAssocItem, DefWithBody, DescendPreference, HasAttrs, HasSource, HirFileIdExt, InFile, + InRealFile, ModuleSource, Semantics, Visibility, }; use memchr::memmem::Finder; use nohash_hasher::IntMap; @@ -467,7 +467,9 @@ impl<'a> FindUsages<'a> { // every textual hit. That function is notoriously // expensive even for things that do not get down mapped // into macros. - sema.descend_into_macros(token, offset).into_iter().filter_map(|it| it.parent()) + sema.descend_into_macros(DescendPreference::None, token, offset) + .into_iter() + .filter_map(|it| it.parent()) }) }; diff --git a/crates/ide/src/call_hierarchy.rs b/crates/ide/src/call_hierarchy.rs index 6f41f51f80ec5..70391cd847b98 100644 --- a/crates/ide/src/call_hierarchy.rs +++ b/crates/ide/src/call_hierarchy.rs @@ -1,6 +1,6 @@ //! Entry point for call-hierarchy -use hir::Semantics; +use hir::{DescendPreference, Semantics}; use ide_db::{ defs::{Definition, NameClass, NameRefClass}, helpers::pick_best_token, @@ -87,7 +87,7 @@ pub(crate) fn outgoing_calls( })?; let mut calls = CallLocations::default(); - sema.descend_into_macros(token, offset) + sema.descend_into_macros(DescendPreference::None, token, offset) .into_iter() .filter_map(|it| it.parent_ancestors().nth(1).and_then(ast::Item::cast)) .filter_map(|item| match item { diff --git a/crates/ide/src/doc_links.rs b/crates/ide/src/doc_links.rs index ac15b6aba6189..97fa7dee306a3 100644 --- a/crates/ide/src/doc_links.rs +++ b/crates/ide/src/doc_links.rs @@ -12,7 +12,9 @@ use pulldown_cmark_to_cmark::{cmark_resume_with_options, Options as CMarkOptions use stdx::format_to; use url::Url; -use hir::{db::HirDatabase, Adt, AsAssocItem, AssocItem, AssocItemContainer, HasAttrs}; +use hir::{ + db::HirDatabase, Adt, AsAssocItem, AssocItem, AssocItemContainer, DescendPreference, HasAttrs, +}; use ide_db::{ base_db::{CrateOrigin, LangCrateOrigin, ReleaseChannel, SourceDatabase}, defs::{Definition, NameClass, NameRefClass}, @@ -144,7 +146,7 @@ pub(crate) fn external_docs( kind if kind.is_trivia() => 0, _ => 1, })?; - let token = sema.descend_into_macros_single(token, offset); + let token = sema.descend_into_macros_single(DescendPreference::None, token, offset); let node = token.parent()?; let definition = match_ast! { @@ -286,7 +288,7 @@ impl DocCommentToken { let original_start = doc_token.text_range().start(); let relative_comment_offset = offset - original_start - prefix_len; - sema.descend_into_macros(doc_token, offset).into_iter().find_map(|t| { + sema.descend_into_macros(DescendPreference::None,doc_token, offset).into_iter().find_map(|t| { let (node, descended_prefix_len) = match_ast! 
{ match t { ast::Comment(comment) => (t.parent()?, TextSize::try_from(comment.prefix().len()).ok()?), diff --git a/crates/ide/src/expand_macro.rs b/crates/ide/src/expand_macro.rs index a70f335ada3cf..cc878dc7196b9 100644 --- a/crates/ide/src/expand_macro.rs +++ b/crates/ide/src/expand_macro.rs @@ -1,4 +1,4 @@ -use hir::{HirFileIdExt, InFile, Semantics}; +use hir::{DescendPreference, HirFileIdExt, InFile, Semantics}; use ide_db::{ base_db::FileId, helpers::pick_best_token, syntax_helpers::insert_whitespace_into_node::insert_ws_into, RootDatabase, @@ -40,8 +40,10 @@ pub(crate) fn expand_macro(db: &RootDatabase, position: FilePosition) -> Option< // struct Bar; // ``` - let derive = - sema.descend_into_macros(tok.clone(), 0.into()).into_iter().find_map(|descended| { + let derive = sema + .descend_into_macros(DescendPreference::None, tok.clone(), 0.into()) + .into_iter() + .find_map(|descended| { let hir_file = sema.hir_file_for(&descended.parent()?); if !hir_file.is_derive_attr_pseudo_expansion(db) { return None; diff --git a/crates/ide/src/extend_selection.rs b/crates/ide/src/extend_selection.rs index 9b2ff070c74b1..1cdbf7840e758 100644 --- a/crates/ide/src/extend_selection.rs +++ b/crates/ide/src/extend_selection.rs @@ -1,6 +1,6 @@ use std::iter::successors; -use hir::Semantics; +use hir::{DescendPreference, Semantics}; use ide_db::RootDatabase; use syntax::{ algo::{self, skip_trivia_token}, @@ -140,10 +140,16 @@ fn extend_tokens_from_range( // compute original mapped token range let extended = { - let fst_expanded = - sema.descend_into_macros_single(first_token.clone(), original_range.start()); - let lst_expanded = - sema.descend_into_macros_single(last_token.clone(), original_range.end()); + let fst_expanded = sema.descend_into_macros_single( + DescendPreference::None, + first_token.clone(), + original_range.start(), + ); + let lst_expanded = sema.descend_into_macros_single( + DescendPreference::None, + last_token.clone(), + original_range.end(), + ); let mut lca = algo::least_common_ancestor(&fst_expanded.parent()?, &lst_expanded.parent()?)?; lca = shallowest_node(&lca); @@ -157,7 +163,8 @@ fn extend_tokens_from_range( let validate = |offset: TextSize| { let extended = &extended; move |token: &SyntaxToken| -> bool { - let expanded = sema.descend_into_macros_single(token.clone(), offset); + let expanded = + sema.descend_into_macros_single(DescendPreference::None, token.clone(), offset); let parent = match expanded.parent() { Some(it) => it, None => return false, diff --git a/crates/ide/src/goto_declaration.rs b/crates/ide/src/goto_declaration.rs index 7e0fab42608b8..ee94dff5fb30a 100644 --- a/crates/ide/src/goto_declaration.rs +++ b/crates/ide/src/goto_declaration.rs @@ -1,4 +1,4 @@ -use hir::{AsAssocItem, Semantics}; +use hir::{AsAssocItem, DescendPreference, Semantics}; use ide_db::{ defs::{Definition, NameClass, NameRefClass}, RootDatabase, @@ -29,7 +29,7 @@ pub(crate) fn goto_declaration( .find(|it| matches!(it.kind(), IDENT | T![self] | T![super] | T![crate] | T![Self]))?; let range = original_token.text_range(); let info: Vec = sema - .descend_into_macros(original_token, offset) + .descend_into_macros(DescendPreference::None, original_token, offset) .iter() .filter_map(|token| { let parent = token.parent()?; diff --git a/crates/ide/src/goto_definition.rs b/crates/ide/src/goto_definition.rs index 816f1bebee429..635f82686206e 100644 --- a/crates/ide/src/goto_definition.rs +++ b/crates/ide/src/goto_definition.rs @@ -4,7 +4,7 @@ use crate::{ doc_links::token_as_doc_comment, 
navigation_target::ToNav, FilePosition, NavigationTarget, RangeInfo, TryToNav, }; -use hir::{AsAssocItem, AssocItem, Semantics}; +use hir::{AsAssocItem, AssocItem, DescendPreference, Semantics}; use ide_db::{ base_db::{AnchoredPath, FileId, FileLoader}, defs::{Definition, IdentClass}, @@ -56,7 +56,7 @@ pub(crate) fn goto_definition( }); } let navs = sema - .descend_into_macros(original_token.clone(), offset) + .descend_into_macros(DescendPreference::None, original_token.clone(), offset) .into_iter() .filter_map(|token| { let parent = token.parent()?; diff --git a/crates/ide/src/goto_implementation.rs b/crates/ide/src/goto_implementation.rs index 3593c5c7dd9c3..6c1b9966a88d8 100644 --- a/crates/ide/src/goto_implementation.rs +++ b/crates/ide/src/goto_implementation.rs @@ -1,4 +1,4 @@ -use hir::{AsAssocItem, Impl, Semantics}; +use hir::{AsAssocItem, DescendPreference, Impl, Semantics}; use ide_db::{ defs::{Definition, NameClass, NameRefClass}, helpers::pick_best_token, @@ -34,7 +34,7 @@ pub(crate) fn goto_implementation( })?; let range = original_token.text_range(); let navs = - sema.descend_into_macros(original_token, offset) + sema.descend_into_macros(DescendPreference::None, original_token, offset) .into_iter() .filter_map(|token| token.parent().and_then(ast::NameLike::cast)) .filter_map(|node| match &node { diff --git a/crates/ide/src/goto_type_definition.rs b/crates/ide/src/goto_type_definition.rs index 955923d76910d..4bd8bfdacb867 100644 --- a/crates/ide/src/goto_type_definition.rs +++ b/crates/ide/src/goto_type_definition.rs @@ -1,3 +1,4 @@ +use hir::DescendPreference; use ide_db::{base_db::Upcast, defs::Definition, helpers::pick_best_token, RootDatabase}; use syntax::{ast, match_ast, AstNode, SyntaxKind::*, SyntaxToken, T}; @@ -37,7 +38,7 @@ pub(crate) fn goto_type_definition( } }; let range = token.text_range(); - sema.descend_into_macros(token, offset) + sema.descend_into_macros(DescendPreference::None,token, offset) .into_iter() .filter_map(|token| { let ty = sema diff --git a/crates/ide/src/highlight_related.rs b/crates/ide/src/highlight_related.rs index a7f5ae92a4cac..620e59a71bf51 100644 --- a/crates/ide/src/highlight_related.rs +++ b/crates/ide/src/highlight_related.rs @@ -1,4 +1,4 @@ -use hir::Semantics; +use hir::{DescendPreference, Semantics}; use ide_db::{ base_db::{FileId, FilePosition, FileRange}, defs::{Definition, IdentClass}, @@ -461,7 +461,7 @@ fn find_defs( token: SyntaxToken, offset: TextSize, ) -> FxHashSet { - sema.descend_into_macros(token, offset) + sema.descend_into_macros(DescendPreference::None, token, offset) .into_iter() .filter_map(|token| IdentClass::classify_token(sema, &token)) .map(IdentClass::definitions_no_ops) diff --git a/crates/ide/src/hover.rs b/crates/ide/src/hover.rs index e0b64fe7988e5..7f2783df3d05b 100644 --- a/crates/ide/src/hover.rs +++ b/crates/ide/src/hover.rs @@ -6,7 +6,7 @@ mod tests; use std::iter; use either::Either; -use hir::{db::DefDatabase, HasSource, LangItem, Semantics}; +use hir::{db::DefDatabase, DescendPreference, HasSource, LangItem, Semantics}; use ide_db::{ base_db::FileRange, defs::{Definition, IdentClass, NameRefClass, OperatorClass}, @@ -161,11 +161,11 @@ fn hover_simple( // prefer descending the same token kind in attribute expansions, in normal macros text // equivalency is more important - let descended = if in_attr { - [sema.descend_into_macros_with_kind_preference(original_token.clone(), offset)].into() - } else { - sema.descend_into_macros_with_same_text(original_token.clone(), offset) - }; + let descended = 
sema.descend_into_macros( + if in_attr { DescendPreference::SameKind } else { DescendPreference::SameText }, + original_token.clone(), + offset, + ); let descended = || descended.iter(); let result = descended() diff --git a/crates/ide/src/moniker.rs b/crates/ide/src/moniker.rs index 2ca2b5b1d5f3e..28d455f4e591e 100644 --- a/crates/ide/src/moniker.rs +++ b/crates/ide/src/moniker.rs @@ -1,7 +1,7 @@ //! This module generates [moniker](https://microsoft.github.io/language-server-protocol/specifications/lsif/0.6.0/specification/#exportsImports) //! for LSIF and LSP. -use hir::{AsAssocItem, AssocItemContainer, Crate, Semantics}; +use hir::{AsAssocItem, AssocItemContainer, Crate, DescendPreference, Semantics}; use ide_db::{ base_db::{CrateOrigin, FilePosition, LangCrateOrigin}, defs::{Definition, IdentClass}, @@ -99,7 +99,7 @@ pub(crate) fn moniker( }); } let navs = sema - .descend_into_macros(original_token.clone(), offset) + .descend_into_macros(DescendPreference::None, original_token.clone(), offset) .into_iter() .filter_map(|token| { IdentClass::classify_token(sema, &token).map(IdentClass::definitions_no_ops).map(|it| { diff --git a/crates/ide/src/references.rs b/crates/ide/src/references.rs index f387bbf6b0143..2285ab199343a 100644 --- a/crates/ide/src/references.rs +++ b/crates/ide/src/references.rs @@ -9,7 +9,7 @@ //! at the index that the match starts at and its tree parent is //! resolved to the search element definition, we get a reference. -use hir::{PathResolution, Semantics}; +use hir::{DescendPreference, PathResolution, Semantics}; use ide_db::{ base_db::FileId, defs::{Definition, NameClass, NameRefClass}, @@ -126,7 +126,7 @@ pub(crate) fn find_defs<'a>( ) }); token.map(|token| { - sema.descend_into_macros_with_same_text(token, offset) + sema.descend_into_macros(DescendPreference::SameText, token, offset) .into_iter() .filter_map(|it| ast::NameLike::cast(it.parent()?)) .filter_map(move |name_like| { diff --git a/crates/ide/src/signature_help.rs b/crates/ide/src/signature_help.rs index e020b52e17104..c95f9994615d4 100644 --- a/crates/ide/src/signature_help.rs +++ b/crates/ide/src/signature_help.rs @@ -4,7 +4,10 @@ use std::collections::BTreeSet; use either::Either; -use hir::{AssocItem, GenericParam, HirDisplay, ModuleDef, PathResolution, Semantics, Trait}; +use hir::{ + AssocItem, DescendPreference, GenericParam, HirDisplay, ModuleDef, PathResolution, Semantics, + Trait, +}; use ide_db::{ active_parameter::{callable_for_node, generic_def_for_node}, base_db::FilePosition, @@ -79,7 +82,7 @@ pub(crate) fn signature_help( // if the cursor is sandwiched between two space tokens and the call is unclosed // this prevents us from leaving the CallExpression .and_then(|tok| algo::skip_trivia_token(tok, Direction::Prev))?; - let token = sema.descend_into_macros_single(token, offset); + let token = sema.descend_into_macros_single(DescendPreference::None, token, offset); for node in token.parent_ancestors() { match_ast! { diff --git a/crates/ide/src/syntax_highlighting.rs b/crates/ide/src/syntax_highlighting.rs index dd72484b3807c..b2db6bb5c43f4 100644 --- a/crates/ide/src/syntax_highlighting.rs +++ b/crates/ide/src/syntax_highlighting.rs @@ -13,7 +13,7 @@ mod html; #[cfg(test)] mod tests; -use hir::{Name, Semantics}; +use hir::{DescendPreference, Name, Semantics}; use ide_db::{FxHashMap, RootDatabase, SymbolKind}; use syntax::{ ast::{self, IsString}, @@ -393,14 +393,14 @@ fn traverse( // Attempt to descend tokens into macro-calls. 
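// Illustrative sketch, not part of the diff: the call-site pattern the hunks
// above converge on. The three old entry points (`descend_into_macros`,
// `_with_same_text`, `_with_kind_preference`) collapse into one method
// parameterized by `DescendPreference`. `sema`, `original_token`, `offset`,
// and `in_attr` are assumed to be in scope, as in the hover handler above.
let pref = if in_attr {
    // attribute expansions rewrite token kinds, so matching on kind is safer
    hir::DescendPreference::SameKind
} else {
    // in ordinary macro expansions, textual equality is the stronger signal
    hir::DescendPreference::SameText
};
let descended = sema.descend_into_macros(pref, original_token.clone(), offset);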
let res = match element { NodeOrToken::Token(token) if token.kind() != COMMENT => { - let token = match attr_or_derive_item { - Some(AttrOrDerive::Attr(_)) => { - sema.descend_into_macros_with_kind_preference(token, 0.into()) - } - Some(AttrOrDerive::Derive(_)) | None => { - sema.descend_into_macros_single(token, 0.into()) - } - }; + let token = sema.descend_into_macros_single( + match attr_or_derive_item { + Some(AttrOrDerive::Attr(_)) => DescendPreference::SameKind, + Some(AttrOrDerive::Derive(_)) | None => DescendPreference::None, + }, + token, + 0.into(), + ); match token.parent().and_then(ast::NameLike::cast) { // Remap the token into the wrapping single token nodes Some(parent) => match (token.kind(), parent.syntax().kind()) { From d2cd30007cf404d185d451a32c471f6ea5321ca9 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Tue, 5 Dec 2023 15:42:39 +0100 Subject: [PATCH 41/80] Implicit format args support --- crates/hir-def/src/body.rs | 12 ++ crates/hir-def/src/body/lower.rs | 24 ++-- crates/hir-def/src/body/tests.rs | 6 +- crates/hir-def/src/hir/format_args.rs | 14 ++- crates/hir/src/semantics.rs | 64 +++++++--- crates/hir/src/source_analyzer.rs | 66 +++++++--- .../ide-assists/src/handlers/bool_to_enum.rs | 2 + .../convert_tuple_return_type_to_struct.rs | 2 + .../extract_expressions_from_format_string.rs | 9 +- .../src/handlers/extract_function.rs | 4 +- .../ide-assists/src/handlers/inline_call.rs | 8 +- .../src/handlers/inline_local_variable.rs | 4 +- .../replace_named_generic_with_impl.rs | 5 +- .../src/handlers/unnecessary_async.rs | 6 +- crates/ide-db/src/helpers.rs | 2 +- crates/ide-db/src/rename.rs | 10 +- crates/ide-db/src/search.rs | 117 +++++++++++++++--- crates/ide/src/call_hierarchy.rs | 2 +- crates/ide/src/doc_links.rs | 4 +- crates/ide/src/expand_macro.rs | 2 +- crates/ide/src/extend_selection.rs | 23 ++-- crates/ide/src/goto_declaration.rs | 2 +- crates/ide/src/goto_definition.rs | 38 ++++-- crates/ide/src/goto_implementation.rs | 2 +- crates/ide/src/goto_type_definition.rs | 85 ++++++++++--- crates/ide/src/highlight_related.rs | 39 ++++-- crates/ide/src/hover.rs | 18 ++- crates/ide/src/hover/tests.rs | 60 +++++++++ crates/ide/src/moniker.rs | 2 +- crates/ide/src/references.rs | 50 ++++++-- crates/ide/src/rename.rs | 91 +++++++++++--- crates/ide/src/runnables.rs | 4 +- crates/ide/src/signature_help.rs | 2 +- crates/ide/src/syntax_highlighting.rs | 1 - crates/mbe/src/token_map.rs | 1 + crates/syntax/src/ast/token_ext.rs | 6 + crates/syntax/src/token_text.rs | 2 +- 37 files changed, 615 insertions(+), 174 deletions(-) diff --git a/crates/hir-def/src/body.rs b/crates/hir-def/src/body.rs index 1942c60c075d7..db28c6731ece1 100644 --- a/crates/hir-def/src/body.rs +++ b/crates/hir-def/src/body.rs @@ -95,6 +95,8 @@ pub struct BodySourceMap { field_map_back: FxHashMap, pat_field_map_back: FxHashMap, + format_args_template_map: FxHashMap>, + expansions: FxHashMap>, HirFileId>, /// Diagnostics accumulated during body lowering. These contain `AstPtr`s and so are stored in @@ -387,6 +389,14 @@ impl BodySourceMap { self.expr_map.get(&src).copied() } + pub fn implicit_format_args( + &self, + node: InFile<&ast::FormatArgsExpr>, + ) -> Option<&[(syntax::TextRange, Name)]> { + let src = node.map(AstPtr::new).map(AstPtr::upcast::); + self.format_args_template_map.get(self.expr_map.get(&src)?).map(std::ops::Deref::deref) + } + /// Get a reference to the body source map's diagnostics. 
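// Illustrative sketch, not part of the diff: `implicit_format_args` hands back
// `(TextRange, Name)` pairs for the `{name}` captures of one `format_args!`
// template, so locating the capture under a cursor is a plain range scan.
// `SourceAnalyzer::resolve_offset_in_format_args` later in this patch performs
// exactly this lookup before resolving the name. Assumed imports shown.
use hir_expand::name::Name;
use syntax::{TextRange, TextSize};

fn capture_at_offset(
    captures: &[(TextRange, Name)],
    offset: TextSize,
) -> Option<&(TextRange, Name)> {
    captures.iter().find(|(range, _)| range.contains_inclusive(offset))
}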
pub fn diagnostics(&self) -> &[BodyDiagnostic] { &self.diagnostics @@ -403,8 +413,10 @@ impl BodySourceMap { field_map_back, pat_field_map_back, expansions, + format_args_template_map, diagnostics, } = self; + format_args_template_map.shrink_to_fit(); expr_map.shrink_to_fit(); expr_map_back.shrink_to_fit(); pat_map.shrink_to_fit(); diff --git a/crates/hir-def/src/body/lower.rs b/crates/hir-def/src/body/lower.rs index 0466068ec810b..c22bd6e2e57cf 100644 --- a/crates/hir-def/src/body/lower.rs +++ b/crates/hir-def/src/body/lower.rs @@ -1597,12 +1597,20 @@ impl ExprCollector<'_> { }); let template = f.template(); let fmt_snippet = template.as_ref().map(ToString::to_string); + let mut mappings = vec![]; let fmt = match template.and_then(|it| self.expand_macros_to_string(it)) { - Some((s, is_direct_literal)) => { - format_args::parse(&s, fmt_snippet, args, is_direct_literal, |name| { - self.alloc_expr_desugared(Expr::Path(Path::from(name))) - }) - } + Some((s, is_direct_literal)) => format_args::parse( + &s, + fmt_snippet, + args, + is_direct_literal, + |name| self.alloc_expr_desugared(Expr::Path(Path::from(name))), + |name, span| { + if let Some(span) = span { + mappings.push((span, name.clone())) + } + }, + ), None => FormatArgs { template: Default::default(), arguments: args.finish() }, }; @@ -1746,14 +1754,16 @@ impl ExprCollector<'_> { tail: Some(unsafe_arg_new), }); - self.alloc_expr( + let idx = self.alloc_expr( Expr::Call { callee: new_v1_formatted, args: Box::new([lit_pieces, args, format_options, unsafe_arg_new]), is_assignee_expr: false, }, syntax_ptr, - ) + ); + self.source_map.format_args_template_map.insert(idx, mappings); + idx } /// Generate a hir expression for a format_args placeholder specification. diff --git a/crates/hir-def/src/body/tests.rs b/crates/hir-def/src/body/tests.rs index 5517abb1ab70f..2b432dfbb92bc 100644 --- a/crates/hir-def/src/body/tests.rs +++ b/crates/hir-def/src/body/tests.rs @@ -160,7 +160,7 @@ fn main() { let count = 10; builtin#lang(Arguments::new_v1_formatted)( &[ - "\"hello ", " ", " friends, we ", " ", "", "\"", + "hello ", " ", " friends, we ", " ", "", ], &[ builtin#lang(Argument::new_display)( @@ -261,7 +261,7 @@ impl SsrError { _ = $crate::error::SsrError::new( builtin#lang(Arguments::new_v1_formatted)( &[ - "\"Failed to resolve path `", "`\"", + "Failed to resolve path `", "`", ], &[ builtin#lang(Argument::new_display)( @@ -320,7 +320,7 @@ fn f() { $crate::panicking::panic_fmt( builtin#lang(Arguments::new_v1_formatted)( &[ - "\"cc\"", + "cc", ], &[], &[], diff --git a/crates/hir-def/src/hir/format_args.rs b/crates/hir-def/src/hir/format_args.rs index 46d24bd4a6142..068abb27a25cf 100644 --- a/crates/hir-def/src/hir/format_args.rs +++ b/crates/hir-def/src/hir/format_args.rs @@ -5,7 +5,7 @@ use hir_expand::name::Name; use rustc_dependencies::parse_format as parse; use syntax::{ ast::{self, IsString}, - AstToken, SmolStr, TextRange, + SmolStr, TextRange, TextSize, }; use crate::hir::ExprId; @@ -170,15 +170,18 @@ pub(crate) fn parse( mut args: FormatArgumentsCollector, is_direct_literal: bool, mut synth: impl FnMut(Name) -> ExprId, + mut record_usage: impl FnMut(Name, Option), ) -> FormatArgs { - let text = s.text(); + let text = s.text_without_quotes(); let str_style = match s.quote_offsets() { Some(offsets) => { let raw = u32::from(offsets.quotes.0.len()) - 1; - (raw != 0).then_some(raw as usize) + // subtract 1 for the `r` prefix + (raw != 0).then(|| raw as usize - 1) } None => None, }; + let mut parser = parse::Parser::new(text, str_style, 
fmt_snippet, false, parse::ParseMode::Format); @@ -199,6 +202,7 @@ pub(crate) fn parse( let to_span = |inner_span: parse::InnerSpan| { is_source_literal.then(|| { TextRange::new(inner_span.start.try_into().unwrap(), inner_span.end.try_into().unwrap()) + - TextSize::from(str_style.map(|it| it + 1).unwrap_or(0) as u32 + 1) }) }; @@ -230,9 +234,10 @@ pub(crate) fn parse( Err(index) } } - ArgRef::Name(name, _span) => { + ArgRef::Name(name, span) => { let name = Name::new_text_dont_use(SmolStr::new(name)); if let Some((index, _)) = args.by_name(&name) { + record_usage(name, span); // Name found in `args`, so we resolve it to its index. if index < args.explicit_args().len() { // Mark it as used, if it was an explicit argument. @@ -246,6 +251,7 @@ pub(crate) fn parse( // disabled (see RFC #2795) // FIXME: Diagnose } + record_usage(name.clone(), span); Ok(args.add(FormatArgument { kind: FormatArgumentKind::Captured(name.clone()), // FIXME: This is problematic, we might want to synthesize a dummy diff --git a/crates/hir/src/semantics.rs b/crates/hir/src/semantics.rs index 7d3c89ddb61f7..b0e0f969d98f3 100644 --- a/crates/hir/src/semantics.rs +++ b/crates/hir/src/semantics.rs @@ -29,8 +29,9 @@ use smallvec::{smallvec, SmallVec}; use stdx::TupleExt; use syntax::{ algo::skip_trivia_token, - ast::{self, HasAttrs as _, HasGenericParams, HasLoopBody}, - match_ast, AstNode, Direction, SyntaxKind, SyntaxNode, SyntaxNodePtr, SyntaxToken, TextSize, + ast::{self, HasAttrs as _, HasGenericParams, HasLoopBody, IsString as _}, + match_ast, AstNode, AstToken, Direction, SyntaxKind, SyntaxNode, SyntaxNodePtr, SyntaxToken, + TextRange, TextSize, }; use crate::{ @@ -49,7 +50,7 @@ pub enum DescendPreference { None, } -#[derive(Debug, Clone, PartialEq, Eq)] +#[derive(Debug, Copy, Clone, PartialEq, Eq)] pub enum PathResolution { /// An item Def(ModuleDef), @@ -402,6 +403,41 @@ impl<'db> SemanticsImpl<'db> { ) } + pub fn resolve_offset_in_format_args( + &self, + string: ast::String, + offset: TextSize, + ) -> Option<(TextRange, Option)> { + debug_assert!(offset <= string.syntax().text_range().len()); + let literal = string.syntax().parent().filter(|it| it.kind() == SyntaxKind::LITERAL)?; + let format_args = ast::FormatArgsExpr::cast(literal.parent()?)?; + let source_analyzer = &self.analyze_no_infer(format_args.syntax())?; + let format_args = self.wrap_node_infile(format_args); + source_analyzer.resolve_offset_in_format_args(self.db, format_args.as_ref(), offset) + } + + pub fn check_for_format_args_template( + &self, + original_token: SyntaxToken, + offset: TextSize, + ) -> Option<(TextRange, Option)> { + if let Some(original_string) = ast::String::cast(original_token.clone()) { + if let Some(quote) = original_string.open_quote_text_range() { + return self + .descend_into_macros(DescendPreference::SameText, original_token.clone()) + .into_iter() + .find_map(|token| { + self.resolve_offset_in_format_args( + ast::String::cast(token)?, + offset - quote.end(), + ) + }) + .map(|(range, res)| (range + quote.end(), res)); + } + } + None + } + /// Maps a node down by mapping its first and last token down. pub fn descend_node_into_attributes(&self, node: N) -> SmallVec<[N; 1]> { // This might not be the correct way to do this, but it works for now @@ -419,8 +455,12 @@ impl<'db> SemanticsImpl<'db> { if first == last { // node is just the token, so descend the token - self.descend_into_macros_impl(first, 0.into(), &mut |InFile { value, .. 
}| { - if let Some(node) = value.parent_ancestors().find_map(N::cast) { + self.descend_into_macros_impl(first, &mut |InFile { value, .. }| { + if let Some(node) = value + .parent_ancestors() + .take_while(|it| it.text_range() == value.text_range()) + .find_map(N::cast) + { res.push(node) } ControlFlow::Continue(()) @@ -428,7 +468,7 @@ impl<'db> SemanticsImpl<'db> { } else { // Descend first and last token, then zip them to look for the node they belong to let mut scratch: SmallVec<[_; 1]> = smallvec![]; - self.descend_into_macros_impl(first, 0.into(), &mut |token| { + self.descend_into_macros_impl(first, &mut |token| { scratch.push(token); ControlFlow::Continue(()) }); @@ -436,7 +476,6 @@ impl<'db> SemanticsImpl<'db> { let mut scratch = scratch.into_iter(); self.descend_into_macros_impl( last, - 0.into(), &mut |InFile { value: last, file_id: last_fid }| { if let Some(InFile { value: first, file_id: first_fid }) = scratch.next() { if first_fid == last_fid { @@ -467,7 +506,6 @@ impl<'db> SemanticsImpl<'db> { &self, mode: DescendPreference, token: SyntaxToken, - offset: TextSize, ) -> SmallVec<[SyntaxToken; 1]> { enum Dp<'t> { SameText(&'t str), @@ -487,7 +525,7 @@ impl<'db> SemanticsImpl<'db> { DescendPreference::None => Dp::None, }; let mut res = smallvec![]; - self.descend_into_macros_impl(token.clone(), offset, &mut |InFile { value, .. }| { + self.descend_into_macros_impl(token.clone(), &mut |InFile { value, .. }| { let is_a_match = match mode { Dp::SameText(text) => value.text() == text, Dp::SameKind(preferred_kind) => { @@ -513,7 +551,6 @@ impl<'db> SemanticsImpl<'db> { &self, mode: DescendPreference, token: SyntaxToken, - offset: TextSize, ) -> SyntaxToken { enum Dp<'t> { SameText(&'t str), @@ -533,7 +570,7 @@ impl<'db> SemanticsImpl<'db> { DescendPreference::None => Dp::None, }; let mut res = token.clone(); - self.descend_into_macros_impl(token.clone(), offset, &mut |InFile { value, .. }| { + self.descend_into_macros_impl(token.clone(), &mut |InFile { value, .. 
}| { let is_a_match = match mode { Dp::SameText(text) => value.text() == text, Dp::SameKind(preferred_kind) => { @@ -558,9 +595,6 @@ impl<'db> SemanticsImpl<'db> { fn descend_into_macros_impl( &self, token: SyntaxToken, - // FIXME: We might want this to be Option to be able to opt out of subrange - // mapping, specifically for node downmapping - _offset: TextSize, f: &mut dyn FnMut(InFile) -> ControlFlow<()>, ) { // FIXME: Clean this up @@ -729,7 +763,7 @@ impl<'db> SemanticsImpl<'db> { offset: TextSize, ) -> impl Iterator + '_> + '_ { node.token_at_offset(offset) - .map(move |token| self.descend_into_macros(DescendPreference::None, token, offset)) + .map(move |token| self.descend_into_macros(DescendPreference::None, token)) .map(|descendants| { descendants.into_iter().map(move |it| self.token_ancestors_with_macros(it)) }) diff --git a/crates/hir/src/source_analyzer.rs b/crates/hir/src/source_analyzer.rs index 8b1148368961d..9fdee209cfbc0 100644 --- a/crates/hir/src/source_analyzer.rs +++ b/crates/hir/src/source_analyzer.rs @@ -820,6 +820,29 @@ impl SourceAnalyzer { false } + pub(crate) fn resolve_offset_in_format_args( + &self, + db: &dyn HirDatabase, + format_args: InFile<&ast::FormatArgsExpr>, + offset: TextSize, + ) -> Option<(TextRange, Option)> { + let implicits = self.body_source_map()?.implicit_format_args(format_args)?; + implicits.iter().find(|(range, _)| range.contains_inclusive(offset)).map(|(range, name)| { + ( + *range, + resolve_hir_value_path( + db, + &self.resolver, + self.resolver.body_owner(), + &Path::from_known_path_with_no_generic(ModPath::from_segments( + PathKind::Plain, + Some(name.clone()), + )), + ), + ) + }) + } + fn resolve_impl_method_or_trait_def( &self, db: &dyn HirDatabase, @@ -1038,24 +1061,7 @@ fn resolve_hir_path_( }; let body_owner = resolver.body_owner(); - let values = || { - resolver.resolve_path_in_value_ns_fully(db.upcast(), path).and_then(|val| { - let res = match val { - ValueNs::LocalBinding(binding_id) => { - let var = Local { parent: body_owner?, binding_id }; - PathResolution::Local(var) - } - ValueNs::FunctionId(it) => PathResolution::Def(Function::from(it).into()), - ValueNs::ConstId(it) => PathResolution::Def(Const::from(it).into()), - ValueNs::StaticId(it) => PathResolution::Def(Static::from(it).into()), - ValueNs::StructId(it) => PathResolution::Def(Struct::from(it).into()), - ValueNs::EnumVariantId(it) => PathResolution::Def(Variant::from(it).into()), - ValueNs::ImplSelf(impl_id) => PathResolution::SelfType(impl_id.into()), - ValueNs::GenericParam(id) => PathResolution::ConstParam(id.into()), - }; - Some(res) - }) - }; + let values = || resolve_hir_value_path(db, resolver, body_owner, path); let items = || { resolver @@ -1075,6 +1081,30 @@ fn resolve_hir_path_( .or_else(macros) } +fn resolve_hir_value_path( + db: &dyn HirDatabase, + resolver: &Resolver, + body_owner: Option, + path: &Path, +) -> Option { + resolver.resolve_path_in_value_ns_fully(db.upcast(), path).and_then(|val| { + let res = match val { + ValueNs::LocalBinding(binding_id) => { + let var = Local { parent: body_owner?, binding_id }; + PathResolution::Local(var) + } + ValueNs::FunctionId(it) => PathResolution::Def(Function::from(it).into()), + ValueNs::ConstId(it) => PathResolution::Def(Const::from(it).into()), + ValueNs::StaticId(it) => PathResolution::Def(Static::from(it).into()), + ValueNs::StructId(it) => PathResolution::Def(Struct::from(it).into()), + ValueNs::EnumVariantId(it) => PathResolution::Def(Variant::from(it).into()), + ValueNs::ImplSelf(impl_id) => 
PathResolution::SelfType(impl_id.into()), + ValueNs::GenericParam(id) => PathResolution::ConstParam(id.into()), + }; + Some(res) + }) +} + /// Resolves a path where we know it is a qualifier of another path. /// /// For example, if we have: diff --git a/crates/ide-assists/src/handlers/bool_to_enum.rs b/crates/ide-assists/src/handlers/bool_to_enum.rs index 11facc5bee2ac..0f2d1057c0a45 100644 --- a/crates/ide-assists/src/handlers/bool_to_enum.rs +++ b/crates/ide-assists/src/handlers/bool_to_enum.rs @@ -328,6 +328,7 @@ fn augment_references_with_imports( references .iter() .filter_map(|FileReference { range, name, .. }| { + let name = name.clone().into_name_like()?; ctx.sema.scope(name.syntax()).map(|scope| (*range, name, scope.module())) }) .map(|(range, name, ref_module)| { @@ -455,6 +456,7 @@ fn add_enum_def( .iter() .flat_map(|(_, refs)| refs) .filter_map(|FileReference { name, .. }| { + let name = name.clone().into_name_like()?; ctx.sema.scope(name.syntax()).map(|scope| scope.module()) }) .any(|module| module.nearest_non_block_module(ctx.db()) != *target_module); diff --git a/crates/ide-assists/src/handlers/convert_tuple_return_type_to_struct.rs b/crates/ide-assists/src/handlers/convert_tuple_return_type_to_struct.rs index 1f3caa7db33f1..79b46d66121eb 100644 --- a/crates/ide-assists/src/handlers/convert_tuple_return_type_to_struct.rs +++ b/crates/ide-assists/src/handlers/convert_tuple_return_type_to_struct.rs @@ -186,6 +186,7 @@ fn augment_references_with_imports( references .iter() .filter_map(|FileReference { name, .. }| { + let name = name.clone().into_name_like()?; ctx.sema.scope(name.syntax()).map(|scope| (name, scope.module())) }) .map(|(name, ref_module)| { @@ -238,6 +239,7 @@ fn add_tuple_struct_def( .iter() .flat_map(|(_, refs)| refs) .filter_map(|FileReference { name, .. 
}| { + let name = name.clone().into_name_like()?; ctx.sema.scope(name.syntax()).map(|scope| scope.module()) }) .any(|module| module.nearest_non_block_module(ctx.db()) != *target_module); diff --git a/crates/ide-assists/src/handlers/extract_expressions_from_format_string.rs b/crates/ide-assists/src/handlers/extract_expressions_from_format_string.rs index 55f2fd9f6ce8f..9d72d3af096a7 100644 --- a/crates/ide-assists/src/handlers/extract_expressions_from_format_string.rs +++ b/crates/ide-assists/src/handlers/extract_expressions_from_format_string.rs @@ -35,11 +35,10 @@ pub(crate) fn extract_expressions_from_format_string( let fmt_string = ctx.find_token_at_offset::()?; let tt = fmt_string.syntax().parent().and_then(ast::TokenTree::cast)?; - let expanded_t = ast::String::cast(ctx.sema.descend_into_macros_single( - DescendPreference::SameKind, - fmt_string.syntax().clone(), - 0.into(), - ))?; + let expanded_t = ast::String::cast( + ctx.sema + .descend_into_macros_single(DescendPreference::SameKind, fmt_string.syntax().clone()), + )?; if !is_format_string(&expanded_t) { return None; } diff --git a/crates/ide-assists/src/handlers/extract_function.rs b/crates/ide-assists/src/handlers/extract_function.rs index 9b892ac1e9817..d4a12307790be 100644 --- a/crates/ide-assists/src/handlers/extract_function.rs +++ b/crates/ide-assists/src/handlers/extract_function.rs @@ -751,9 +751,7 @@ impl FunctionBody { .descendants_with_tokens() .filter_map(SyntaxElement::into_token) .filter(|it| matches!(it.kind(), SyntaxKind::IDENT | T![self])) - .flat_map(|t| { - sema.descend_into_macros(DescendPreference::None, t, 0.into()) - }) + .flat_map(|t| sema.descend_into_macros(DescendPreference::None, t)) .for_each(|t| add_name_if_local(t.parent().and_then(ast::NameRef::cast))); } } diff --git a/crates/ide-assists/src/handlers/inline_call.rs b/crates/ide-assists/src/handlers/inline_call.rs index f8c75bdb0de1c..5b9cc5f66cde1 100644 --- a/crates/ide-assists/src/handlers/inline_call.rs +++ b/crates/ide-assists/src/handlers/inline_call.rs @@ -8,7 +8,7 @@ use ide_db::{ defs::Definition, imports::insert_use::remove_path_if_in_use_stmt, path_transform::PathTransform, - search::{FileReference, SearchScope}, + search::{FileReference, FileReferenceNode, SearchScope}, source_change::SourceChangeBuilder, syntax_helpers::{insert_whitespace_into_node::insert_ws_into, node_ext::expr_as_name_ref}, RootDatabase, @@ -148,7 +148,7 @@ pub(super) fn split_refs_and_uses( ) -> (Vec, Vec) { iter.into_iter() .filter_map(|file_ref| match file_ref.name { - ast::NameLike::NameRef(name_ref) => Some(name_ref), + FileReferenceNode::NameRef(name_ref) => Some(name_ref), _ => None, }) .filter_map(|name_ref| match name_ref.syntax().ancestors().find_map(ast::UseTree::cast) { @@ -346,7 +346,7 @@ fn inline( match param.as_local(sema.db) { Some(l) => usages_for_locals(l) .map(|FileReference { name, range, .. }| match name { - ast::NameLike::NameRef(_) => body + FileReferenceNode::NameRef(_) => body .syntax() .covering_element(range) .ancestors() @@ -372,7 +372,7 @@ fn inline( if let Some(self_local) = params[0].2.as_local(sema.db) { usages_for_locals(self_local) .filter_map(|FileReference { name, range, .. 
}| match name { - ast::NameLike::NameRef(_) => Some(body.syntax().covering_element(range)), + FileReferenceNode::NameRef(_) => Some(body.syntax().covering_element(range)), _ => None, }) .for_each(|usage| { diff --git a/crates/ide-assists/src/handlers/inline_local_variable.rs b/crates/ide-assists/src/handlers/inline_local_variable.rs index 49dcde75d2b31..5d8ba43ec8461 100644 --- a/crates/ide-assists/src/handlers/inline_local_variable.rs +++ b/crates/ide-assists/src/handlers/inline_local_variable.rs @@ -2,7 +2,7 @@ use hir::{PathResolution, Semantics}; use ide_db::{ base_db::FileId, defs::Definition, - search::{FileReference, UsageSearchResult}, + search::{FileReference, FileReferenceNode, UsageSearchResult}, RootDatabase, }; use syntax::{ @@ -63,7 +63,7 @@ pub(crate) fn inline_local_variable(acc: &mut Assists, ctx: &AssistContext<'_>) let wrap_in_parens = references .into_iter() .filter_map(|FileReference { range, name, .. }| match name { - ast::NameLike::NameRef(name) => Some((range, name)), + FileReferenceNode::NameRef(name) => Some((range, name)), _ => None, }) .map(|(range, name_ref)| { diff --git a/crates/ide-assists/src/handlers/replace_named_generic_with_impl.rs b/crates/ide-assists/src/handlers/replace_named_generic_with_impl.rs index c7c0be4c7d4f8..e61ce481727a1 100644 --- a/crates/ide-assists/src/handlers/replace_named_generic_with_impl.rs +++ b/crates/ide-assists/src/handlers/replace_named_generic_with_impl.rs @@ -59,7 +59,10 @@ pub(crate) fn replace_named_generic_with_impl( let mut path_types_to_replace = Vec::new(); for (_a, refs) in usage_refs.iter() { for usage_ref in refs { - let param_node = find_path_type(&ctx.sema, &type_param_name, &usage_ref.name)?; + let Some(name_like) = usage_ref.name.clone().into_name_like() else { + continue; + }; + let param_node = find_path_type(&ctx.sema, &type_param_name, &name_like)?; path_types_to_replace.push(param_node); } } diff --git a/crates/ide-assists/src/handlers/unnecessary_async.rs b/crates/ide-assists/src/handlers/unnecessary_async.rs index 7f612c2a142c7..1cfa291a29d8e 100644 --- a/crates/ide-assists/src/handlers/unnecessary_async.rs +++ b/crates/ide-assists/src/handlers/unnecessary_async.rs @@ -2,11 +2,11 @@ use ide_db::{ assists::{AssistId, AssistKind}, base_db::FileId, defs::Definition, - search::FileReference, + search::{FileReference, FileReferenceNode}, syntax_helpers::node_ext::full_path_of_name_ref, }; use syntax::{ - ast::{self, NameLike, NameRef}, + ast::{self, NameRef}, AstNode, SyntaxKind, TextRange, }; @@ -76,7 +76,7 @@ pub(crate) fn unnecessary_async(acc: &mut Assists, ctx: &AssistContext<'_>) -> O for await_expr in find_all_references(ctx, &Definition::Function(fn_def)) // Keep only references that correspond NameRefs. 
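// Illustrative sketch, not part of the diff: the shared filtering idiom in the
// assist hunks above. `FileReferenceNode` (added in the ide-db/src/search.rs
// hunk below) supersedes `ast::NameLike` so a reference can also point into a
// format string; assists that only understand name-like nodes peel the new
// variant off up front, either by matching `NameRef` directly or via
// `into_name_like()`.
use ide_db::search::{FileReference, FileReferenceNode};

fn name_refs_only(
    refs: impl Iterator<Item = FileReference>,
) -> impl Iterator<Item = syntax::ast::NameRef> {
    refs.filter_map(|it| match it.name {
        FileReferenceNode::NameRef(name_ref) => Some(name_ref),
        // Name, Lifetime and the new FormatStringEntry are not rewritable here
        _ => None,
    })
}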
.filter_map(|(_, reference)| match reference.name { - NameLike::NameRef(nameref) => Some(nameref), + FileReferenceNode::NameRef(nameref) => Some(nameref), _ => None, }) // Keep only references that correspond to await expressions diff --git a/crates/ide-db/src/helpers.rs b/crates/ide-db/src/helpers.rs index d4b031879d11a..9363bdfa14b2a 100644 --- a/crates/ide-db/src/helpers.rs +++ b/crates/ide-db/src/helpers.rs @@ -117,7 +117,7 @@ pub fn get_definition( sema: &Semantics<'_, RootDatabase>, token: SyntaxToken, ) -> Option { - for token in sema.descend_into_macros(DescendPreference::None, token, 0.into()) { + for token in sema.descend_into_macros(DescendPreference::None, token) { let def = IdentClass::classify_token(sema, &token).map(IdentClass::definitions_no_ops); if let Some(&[x]) = def.as_deref() { return Some(x); diff --git a/crates/ide-db/src/rename.rs b/crates/ide-db/src/rename.rs index 676d286b8dcc0..d2b6a732689c1 100644 --- a/crates/ide-db/src/rename.rs +++ b/crates/ide-db/src/rename.rs @@ -34,7 +34,7 @@ use text_edit::{TextEdit, TextEditBuilder}; use crate::{ defs::Definition, - search::FileReference, + search::{FileReference, FileReferenceNode}, source_change::{FileSystemEdit, SourceChange}, syntax_helpers::node_ext::expr_as_name_ref, traits::convert_to_def_in_trait, @@ -361,7 +361,7 @@ pub fn source_edit_from_references( // macros can cause multiple refs to occur for the same text range, so keep track of what we have edited so far let mut edited_ranges = Vec::new(); for &FileReference { range, ref name, .. } in references { - let name_range = name.syntax().text_range(); + let name_range = name.text_range(); if name_range.len() != range.len() { // This usage comes from a different token kind that was downmapped to a NameLike in a macro // Renaming this will most likely break things syntax-wise @@ -371,17 +371,17 @@ pub fn source_edit_from_references( // if the ranges differ then the node is inside a macro call, we can't really attempt // to make special rewrites like shorthand syntax and such, so just rename the node in // the macro input - ast::NameLike::NameRef(name_ref) if name_range == range => { + FileReferenceNode::NameRef(name_ref) if name_range == range => { source_edit_from_name_ref(&mut edit, name_ref, new_name, def) } - ast::NameLike::Name(name) if name_range == range => { + FileReferenceNode::Name(name) if name_range == range => { source_edit_from_name(&mut edit, name, new_name) } _ => false, }; if !has_emitted_edit && !edited_ranges.contains(&range.start()) { let (range, new_name) = match name { - ast::NameLike::Lifetime(_) => ( + FileReferenceNode::Lifetime(_) => ( TextRange::new(range.start() + syntax::TextSize::from(1), range.end()), new_name.strip_prefix('\'').unwrap_or(new_name).to_owned(), ), diff --git a/crates/ide-db/src/search.rs b/crates/ide-db/src/search.rs index c1ed17503fe99..dbef360268224 100644 --- a/crates/ide-db/src/search.rs +++ b/crates/ide-db/src/search.rs @@ -9,13 +9,13 @@ use std::mem; use base_db::{salsa::Database, FileId, FileRange, SourceDatabase, SourceDatabaseExt}; use hir::{ AsAssocItem, DefWithBody, DescendPreference, HasAttrs, HasSource, HirFileIdExt, InFile, - InRealFile, ModuleSource, Semantics, Visibility, + InRealFile, ModuleSource, PathResolution, Semantics, Visibility, }; use memchr::memmem::Finder; use nohash_hasher::IntMap; use once_cell::unsync::Lazy; use parser::SyntaxKind; -use syntax::{ast, match_ast, AstNode, TextRange, TextSize}; +use syntax::{ast, match_ast, AstNode, AstToken, SyntaxElement, TextRange, TextSize}; use 
triomphe::Arc; use crate::{ @@ -63,10 +63,67 @@ pub struct FileReference { /// The range of the reference in the original file pub range: TextRange, /// The node of the reference in the (macro-)file - pub name: ast::NameLike, + pub name: FileReferenceNode, pub category: Option, } +#[derive(Debug, Clone)] +pub enum FileReferenceNode { + Name(ast::Name), + NameRef(ast::NameRef), + Lifetime(ast::Lifetime), + FormatStringEntry(ast::String, TextRange), +} + +impl FileReferenceNode { + pub fn text_range(&self) -> TextRange { + match self { + FileReferenceNode::Name(it) => it.syntax().text_range(), + FileReferenceNode::NameRef(it) => it.syntax().text_range(), + FileReferenceNode::Lifetime(it) => it.syntax().text_range(), + FileReferenceNode::FormatStringEntry(_, range) => *range, + } + } + pub fn syntax(&self) -> SyntaxElement { + match self { + FileReferenceNode::Name(it) => it.syntax().clone().into(), + FileReferenceNode::NameRef(it) => it.syntax().clone().into(), + FileReferenceNode::Lifetime(it) => it.syntax().clone().into(), + FileReferenceNode::FormatStringEntry(it, _) => it.syntax().clone().into(), + } + } + pub fn into_name_like(self) -> Option { + match self { + FileReferenceNode::Name(it) => Some(ast::NameLike::Name(it)), + FileReferenceNode::NameRef(it) => Some(ast::NameLike::NameRef(it)), + FileReferenceNode::Lifetime(it) => Some(ast::NameLike::Lifetime(it)), + FileReferenceNode::FormatStringEntry(_, _) => None, + } + } + pub fn as_name_ref(&self) -> Option<&ast::NameRef> { + match self { + FileReferenceNode::NameRef(name_ref) => Some(name_ref), + _ => None, + } + } + pub fn as_lifetime(&self) -> Option<&ast::Lifetime> { + match self { + FileReferenceNode::Lifetime(lifetime) => Some(lifetime), + _ => None, + } + } + pub fn text(&self) -> syntax::TokenText<'_> { + match self { + FileReferenceNode::NameRef(name_ref) => name_ref.text(), + FileReferenceNode::Name(name) => name.text(), + FileReferenceNode::Lifetime(lifetime) => lifetime.text(), + FileReferenceNode::FormatStringEntry(it, range) => { + syntax::TokenText::borrowed(&it.text()[*range - it.syntax().text_range().start()]) + } + } + } +} + #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] pub enum ReferenceCategory { // FIXME: Add this variant and delete the `retain_adt_literal_usages` function. @@ -467,7 +524,7 @@ impl<'a> FindUsages<'a> { // every textual hit. That function is notoriously // expensive even for things that do not get down mapped // into macros. 
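// Illustrative sketch, not part of the diff: the shape of the format-args
// probe added a few lines below in `FindUsages`. Before walking name-like
// nodes, the search asks whether `offset` lands on an implicit `{name}`
// capture inside a string literal and, on a hit, reports a
// `FormatStringEntry` reference instead. `sema`, `token`, and `offset` are
// assumed to be in scope; `syntax::AstToken` must be imported for
// `ast::String::cast`.
if let Some(str_token) = syntax::ast::String::cast(token.clone()) {
    if let Some((range, resolution)) =
        sema.check_for_format_args_template(token.clone(), offset)
    {
        // `range` spans the capture inside the literal; `resolution` is the
        // definition it resolves to, if any, which is then compared against
        // the definition being searched for.
        let _ = (str_token, range, resolution);
    }
}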
- sema.descend_into_macros(DescendPreference::None, token, offset) + sema.descend_into_macros(DescendPreference::None, token) .into_iter() .filter_map(|it| it.parent()) }) @@ -479,6 +536,17 @@ impl<'a> FindUsages<'a> { // Search for occurrences of the items name for offset in match_indices(&text, finder, search_range) { + tree.token_at_offset(offset).into_iter().for_each(|token| { + let Some(str_token) = ast::String::cast(token.clone()) else { return }; + if let Some((range, nameres)) = + sema.check_for_format_args_template(token.clone(), offset) + { + if self.found_format_args_ref(file_id, range, str_token, nameres, sink) { + return; + } + } + }); + for name in find_nodes(name, &tree, offset).filter_map(ast::NameLike::cast) { if match name { ast::NameLike::NameRef(name_ref) => self.found_name_ref(&name_ref, sink), @@ -593,7 +661,7 @@ impl<'a> FindUsages<'a> { let FileRange { file_id, range } = self.sema.original_range(name_ref.syntax()); let reference = FileReference { range, - name: ast::NameLike::NameRef(name_ref.clone()), + name: FileReferenceNode::NameRef(name_ref.clone()), category: None, }; sink(file_id, reference) @@ -612,7 +680,7 @@ impl<'a> FindUsages<'a> { let FileRange { file_id, range } = self.sema.original_range(name_ref.syntax()); let reference = FileReference { range, - name: ast::NameLike::NameRef(name_ref.clone()), + name: FileReferenceNode::NameRef(name_ref.clone()), category: is_name_ref_in_import(name_ref).then_some(ReferenceCategory::Import), }; sink(file_id, reference) @@ -621,6 +689,27 @@ impl<'a> FindUsages<'a> { } } + fn found_format_args_ref( + &self, + file_id: FileId, + range: TextRange, + token: ast::String, + res: Option, + sink: &mut dyn FnMut(FileId, FileReference) -> bool, + ) -> bool { + match res.map(Definition::from) { + Some(def) if def == self.def => { + let reference = FileReference { + range, + name: FileReferenceNode::FormatStringEntry(token, range), + category: Some(ReferenceCategory::Read), + }; + sink(file_id, reference) + } + _ => false, + } + } + fn found_lifetime( &self, lifetime: &ast::Lifetime, @@ -631,7 +720,7 @@ impl<'a> FindUsages<'a> { let FileRange { file_id, range } = self.sema.original_range(lifetime.syntax()); let reference = FileReference { range, - name: ast::NameLike::Lifetime(lifetime.clone()), + name: FileReferenceNode::Lifetime(lifetime.clone()), category: None, }; sink(file_id, reference) @@ -655,7 +744,7 @@ impl<'a> FindUsages<'a> { let FileRange { file_id, range } = self.sema.original_range(name_ref.syntax()); let reference = FileReference { range, - name: ast::NameLike::NameRef(name_ref.clone()), + name: FileReferenceNode::NameRef(name_ref.clone()), category: ReferenceCategory::new(&def, name_ref), }; sink(file_id, reference) @@ -671,7 +760,7 @@ impl<'a> FindUsages<'a> { let FileRange { file_id, range } = self.sema.original_range(name_ref.syntax()); let reference = FileReference { range, - name: ast::NameLike::NameRef(name_ref.clone()), + name: FileReferenceNode::NameRef(name_ref.clone()), category: ReferenceCategory::new(&def, name_ref), }; sink(file_id, reference) @@ -681,7 +770,7 @@ impl<'a> FindUsages<'a> { let FileRange { file_id, range } = self.sema.original_range(name_ref.syntax()); let reference = FileReference { range, - name: ast::NameLike::NameRef(name_ref.clone()), + name: FileReferenceNode::NameRef(name_ref.clone()), category: ReferenceCategory::new(&def, name_ref), }; sink(file_id, reference) @@ -705,7 +794,7 @@ impl<'a> FindUsages<'a> { }; let reference = FileReference { range, - name: 
ast::NameLike::NameRef(name_ref.clone()), + name: FileReferenceNode::NameRef(name_ref.clone()), category: access, }; sink(file_id, reference) @@ -728,7 +817,7 @@ impl<'a> FindUsages<'a> { let FileRange { file_id, range } = self.sema.original_range(name.syntax()); let reference = FileReference { range, - name: ast::NameLike::Name(name.clone()), + name: FileReferenceNode::Name(name.clone()), // FIXME: mutable patterns should have `Write` access category: Some(ReferenceCategory::Read), }; @@ -738,7 +827,7 @@ impl<'a> FindUsages<'a> { let FileRange { file_id, range } = self.sema.original_range(name.syntax()); let reference = FileReference { range, - name: ast::NameLike::Name(name.clone()), + name: FileReferenceNode::Name(name.clone()), category: None, }; sink(file_id, reference) @@ -763,7 +852,7 @@ impl<'a> FindUsages<'a> { let FileRange { file_id, range } = self.sema.original_range(name.syntax()); let reference = FileReference { range, - name: ast::NameLike::Name(name.clone()), + name: FileReferenceNode::Name(name.clone()), category: None, }; sink(file_id, reference) diff --git a/crates/ide/src/call_hierarchy.rs b/crates/ide/src/call_hierarchy.rs index 70391cd847b98..5cc64e60ed852 100644 --- a/crates/ide/src/call_hierarchy.rs +++ b/crates/ide/src/call_hierarchy.rs @@ -87,7 +87,7 @@ pub(crate) fn outgoing_calls( })?; let mut calls = CallLocations::default(); - sema.descend_into_macros(DescendPreference::None, token, offset) + sema.descend_into_macros(DescendPreference::None, token) .into_iter() .filter_map(|it| it.parent_ancestors().nth(1).and_then(ast::Item::cast)) .filter_map(|item| match item { diff --git a/crates/ide/src/doc_links.rs b/crates/ide/src/doc_links.rs index 97fa7dee306a3..9760f9daf0a39 100644 --- a/crates/ide/src/doc_links.rs +++ b/crates/ide/src/doc_links.rs @@ -146,7 +146,7 @@ pub(crate) fn external_docs( kind if kind.is_trivia() => 0, _ => 1, })?; - let token = sema.descend_into_macros_single(DescendPreference::None, token, offset); + let token = sema.descend_into_macros_single(DescendPreference::None, token); let node = token.parent()?; let definition = match_ast! { @@ -288,7 +288,7 @@ impl DocCommentToken { let original_start = doc_token.text_range().start(); let relative_comment_offset = offset - original_start - prefix_len; - sema.descend_into_macros(DescendPreference::None,doc_token, offset).into_iter().find_map(|t| { + sema.descend_into_macros(DescendPreference::None, doc_token).into_iter().find_map(|t| { let (node, descended_prefix_len) = match_ast! 
{ match t { ast::Comment(comment) => (t.parent()?, TextSize::try_from(comment.prefix().len()).ok()?), diff --git a/crates/ide/src/expand_macro.rs b/crates/ide/src/expand_macro.rs index cc878dc7196b9..653d9f0883840 100644 --- a/crates/ide/src/expand_macro.rs +++ b/crates/ide/src/expand_macro.rs @@ -41,7 +41,7 @@ pub(crate) fn expand_macro(db: &RootDatabase, position: FilePosition) -> Option< // ``` let derive = sema - .descend_into_macros(DescendPreference::None, tok.clone(), 0.into()) + .descend_into_macros(DescendPreference::None, tok.clone()) .into_iter() .find_map(|descended| { let hir_file = sema.hir_file_for(&descended.parent()?); diff --git a/crates/ide/src/extend_selection.rs b/crates/ide/src/extend_selection.rs index 1cdbf7840e758..b706e959d34ef 100644 --- a/crates/ide/src/extend_selection.rs +++ b/crates/ide/src/extend_selection.rs @@ -140,16 +140,10 @@ fn extend_tokens_from_range( // compute original mapped token range let extended = { - let fst_expanded = sema.descend_into_macros_single( - DescendPreference::None, - first_token.clone(), - original_range.start(), - ); - let lst_expanded = sema.descend_into_macros_single( - DescendPreference::None, - last_token.clone(), - original_range.end(), - ); + let fst_expanded = + sema.descend_into_macros_single(DescendPreference::None, first_token.clone()); + let lst_expanded = + sema.descend_into_macros_single(DescendPreference::None, last_token.clone()); let mut lca = algo::least_common_ancestor(&fst_expanded.parent()?, &lst_expanded.parent()?)?; lca = shallowest_node(&lca); @@ -160,11 +154,10 @@ fn extend_tokens_from_range( }; // Compute parent node range - let validate = |offset: TextSize| { + let validate = || { let extended = &extended; move |token: &SyntaxToken| -> bool { - let expanded = - sema.descend_into_macros_single(DescendPreference::None, token.clone(), offset); + let expanded = sema.descend_into_macros_single(DescendPreference::None, token.clone()); let parent = match expanded.parent() { Some(it) => it, None => return false, @@ -178,14 +171,14 @@ fn extend_tokens_from_range( let token = token.prev_token()?; skip_trivia_token(token, Direction::Prev) }) - .take_while(validate(original_range.start())) + .take_while(validate()) .last()?; let last = successors(Some(last_token), |token| { let token = token.next_token()?; skip_trivia_token(token, Direction::Next) }) - .take_while(validate(original_range.end())) + .take_while(validate()) .last()?; let range = first.text_range().cover(last.text_range()); diff --git a/crates/ide/src/goto_declaration.rs b/crates/ide/src/goto_declaration.rs index ee94dff5fb30a..ad7ec1964567b 100644 --- a/crates/ide/src/goto_declaration.rs +++ b/crates/ide/src/goto_declaration.rs @@ -29,7 +29,7 @@ pub(crate) fn goto_declaration( .find(|it| matches!(it.kind(), IDENT | T![self] | T![super] | T![crate] | T![Self]))?; let range = original_token.text_range(); let info: Vec = sema - .descend_into_macros(DescendPreference::None, original_token, offset) + .descend_into_macros(DescendPreference::None, original_token) .iter() .filter_map(|token| { let parent = token.parent()?; diff --git a/crates/ide/src/goto_definition.rs b/crates/ide/src/goto_definition.rs index 635f82686206e..5ca82a362f53e 100644 --- a/crates/ide/src/goto_definition.rs +++ b/crates/ide/src/goto_definition.rs @@ -55,8 +55,21 @@ pub(crate) fn goto_definition( Some(RangeInfo::new(link_range, vec![nav])) }); } + + if let Some((range, resolution)) = + sema.check_for_format_args_template(original_token.clone(), offset) + { + return 
Some(RangeInfo::new( + range, + match resolution { + Some(res) => def_to_nav(db, Definition::from(res)), + None => vec![], + }, + )); + } + let navs = sema - .descend_into_macros(DescendPreference::None, original_token.clone(), offset) + .descend_into_macros(DescendPreference::None, original_token.clone()) .into_iter() .filter_map(|token| { let parent = token.parent()?; @@ -809,18 +822,13 @@ mod confuse_index { fn foo(); } fn goto_through_format() { check( r#" +//- minicore: fmt #[macro_export] macro_rules! format { ($($arg:tt)*) => ($crate::fmt::format($crate::__export::format_args!($($arg)*))) } -#[rustc_builtin_macro] -#[macro_export] -macro_rules! format_args { - ($fmt:expr) => ({ /* compiler built-in */ }); - ($fmt:expr, $($args:tt)*) => ({ /* compiler built-in */ }) -} pub mod __export { - pub use crate::format_args; + pub use core::format_args; fn foo() {} // for index confusion } fn foo() -> i8 {} @@ -2056,6 +2064,20 @@ fn f2() { struct S2; S1::e$0(); } +"#, + ); + } + + #[test] + fn implicit_format_args() { + check( + r#" +//- minicore: fmt +fn test() { + let a = "world"; + // ^ + format_args!("hello {a$0}"); +} "#, ); } diff --git a/crates/ide/src/goto_implementation.rs b/crates/ide/src/goto_implementation.rs index 6c1b9966a88d8..bb474282dd7c8 100644 --- a/crates/ide/src/goto_implementation.rs +++ b/crates/ide/src/goto_implementation.rs @@ -34,7 +34,7 @@ pub(crate) fn goto_implementation( })?; let range = original_token.text_range(); let navs = - sema.descend_into_macros(DescendPreference::None, original_token, offset) + sema.descend_into_macros(DescendPreference::None, original_token) .into_iter() .filter_map(|token| token.parent().and_then(ast::NameLike::cast)) .filter_map(|node| match &node { diff --git a/crates/ide/src/goto_type_definition.rs b/crates/ide/src/goto_type_definition.rs index 4bd8bfdacb867..83f134aaaf576 100644 --- a/crates/ide/src/goto_type_definition.rs +++ b/crates/ide/src/goto_type_definition.rs @@ -1,4 +1,4 @@ -use hir::DescendPreference; +use hir::{DescendPreference, GenericParam}; use ide_db::{base_db::Upcast, defs::Definition, helpers::pick_best_token, RootDatabase}; use syntax::{ast, match_ast, AstNode, SyntaxKind::*, SyntaxToken, T}; @@ -37,8 +37,37 @@ pub(crate) fn goto_type_definition( } } }; + let mut process_ty = |ty: hir::Type| { + // collect from each `ty` into the `res` result vec + let ty = ty.strip_references(); + ty.walk(db, |t| { + if let Some(adt) = t.as_adt() { + push(adt.into()); + } else if let Some(trait_) = t.as_dyn_trait() { + push(trait_.into()); + } else if let Some(traits) = t.as_impl_traits(db) { + traits.for_each(|it| push(it.into())); + } else if let Some(trait_) = t.as_associated_type_parent_trait(db) { + push(trait_.into()); + } + }); + }; + if let Some((range, resolution)) = sema.check_for_format_args_template(token.clone(), offset) { + if let Some(ty) = resolution.and_then(|res| match Definition::from(res) { + Definition::Const(it) => Some(it.ty(db)), + Definition::Static(it) => Some(it.ty(db)), + Definition::GenericParam(GenericParam::ConstParam(it)) => Some(it.ty(db)), + Definition::Local(it) => Some(it.ty(db)), + Definition::Adt(hir::Adt::Struct(it)) => Some(it.ty(db)), + _ => None, + }) { + process_ty(ty); + } + return Some(RangeInfo::new(range, res)); + } + let range = token.text_range(); - sema.descend_into_macros(DescendPreference::None,token, offset) + sema.descend_into_macros(DescendPreference::None, token) .into_iter() .filter_map(|token| { let ty = sema @@ -76,21 +105,7 @@ pub(crate) fn goto_type_definition( }); ty 
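// NOTE: `check_for_format_args_template` is the shared entry point these IDE
// features use for implicit format arguments: given the token under the
// cursor and the offset, it reports whether the cursor sits on a `{name}`
// placeholder and, if so, returns the placeholder's text range together with
// the resolution of `name`. A minimal caller, condensed from the
// `goto_definition` hunk above:
//
//     if let Some((range, resolution)) =
//         sema.check_for_format_args_template(token.clone(), offset)
//     {
//         let navs = match resolution {
//             Some(res) => def_to_nav(db, Definition::from(res)),
//             None => vec![], // the placeholder did not resolve
//         };
//         return Some(RangeInfo::new(range, navs));
//     }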
}) - .for_each(|ty| { - // collect from each `ty` into the `res` result vec - let ty = ty.strip_references(); - ty.walk(db, |t| { - if let Some(adt) = t.as_adt() { - push(adt.into()); - } else if let Some(trait_) = t.as_dyn_trait() { - push(trait_.into()); - } else if let Some(traits) = t.as_impl_traits(db) { - traits.for_each(|it| push(it.into())); - } else if let Some(trait_) = t.as_associated_type_parent_trait(db) { - push(trait_.into()); - } - }); - }); + .for_each(process_ty); Some(RangeInfo::new(range, res)) } @@ -326,6 +341,42 @@ struct Baz(T); //^^^ fn foo(x$0: Bar, Baz) {} +"#, + ); + } + + #[test] + fn implicit_format_args() { + check( + r#" +//- minicore: fmt +struct Bar; + // ^^^ + fn test() { + let a = Bar; + format_args!("hello {a$0}"); +} +"#, + ); + check( + r#" +//- minicore: fmt +struct Bar; + // ^^^ + fn test() { + format_args!("hello {Bar$0}"); +} +"#, + ); + check( + r#" +//- minicore: fmt +struct Bar; + // ^^^ +const BAR: Bar = Bar; +fn test() { + format_args!("hello {BAR$0}"); +} "#, ); } diff --git a/crates/ide/src/highlight_related.rs b/crates/ide/src/highlight_related.rs index 620e59a71bf51..8daff8c2ebe3a 100644 --- a/crates/ide/src/highlight_related.rs +++ b/crates/ide/src/highlight_related.rs @@ -1,3 +1,5 @@ +use std::iter; + use hir::{DescendPreference, Semantics}; use ide_db::{ base_db::{FileId, FilePosition, FileRange}, @@ -15,7 +17,6 @@ use syntax::{ SyntaxKind::{self, IDENT, INT_NUMBER}, SyntaxNode, SyntaxToken, TextRange, T, }; -use text_edit::TextSize; use crate::{navigation_target::ToNav, references, NavigationTarget, TryToNav}; @@ -132,7 +133,16 @@ fn highlight_references( token: SyntaxToken, FilePosition { file_id, offset }: FilePosition, ) -> Option> { - let defs = find_defs(sema, token.clone(), offset); + let defs = if let Some((range, resolution)) = + sema.check_for_format_args_template(token.clone(), offset) + { + match resolution.map(Definition::from) { + Some(def) => iter::once(def).collect(), + None => return Some(vec![HighlightedRange { range, category: None }]), + } + } else { + find_defs(sema, token.clone()) + }; let usages = defs .iter() .filter_map(|&d| { @@ -456,12 +466,8 @@ fn cover_range(r0: Option, r1: Option) -> Option, - token: SyntaxToken, - offset: TextSize, -) -> FxHashSet { - sema.descend_into_macros(DescendPreference::None, token, offset) +fn find_defs(sema: &Semantics<'_, RootDatabase>, token: SyntaxToken) -> FxHashSet { + sema.descend_into_macros(DescendPreference::None, token) .into_iter() .filter_map(|token| IdentClass::classify_token(sema, &token)) .map(IdentClass::definitions_no_ops) @@ -1620,6 +1626,23 @@ fn f2(t: T) { T::C; T::f(); } +"#, + ); + } + + #[test] + fn implicit_format_args() { + check( + r#" +//- minicore: fmt +fn test() { + let a = "foo"; + // ^ + format_args!("hello {a} {a$0} {}", a); + // ^read + // ^read + // ^read +} "#, ); } diff --git a/crates/ide/src/hover.rs b/crates/ide/src/hover.rs index 7f2783df3d05b..88a5b623425d1 100644 --- a/crates/ide/src/hover.rs +++ b/crates/ide/src/hover.rs @@ -150,6 +150,19 @@ fn hover_simple( }); } + if let Some((range, resolution)) = + sema.check_for_format_args_template(original_token.clone(), offset) + { + let res = hover_for_definition( + sema, + file_id, + Definition::from(resolution?), + &original_token.parent()?, + config, + )?; + return Some(RangeInfo::new(range, res)); + } + let in_attr = original_token .parent_ancestors() .filter_map(ast::Item::cast) @@ -164,7 +177,6 @@ fn hover_simple( let descended = sema.descend_into_macros( if in_attr { 
DescendPreference::SameKind } else { DescendPreference::SameText }, original_token.clone(), - offset, ); let descended = || descended.iter(); @@ -298,11 +310,11 @@ pub(crate) fn hover_for_definition( sema: &Semantics<'_, RootDatabase>, file_id: FileId, definition: Definition, - node: &SyntaxNode, + scope_node: &SyntaxNode, config: &HoverConfig, ) -> Option { let famous_defs = match &definition { - Definition::BuiltinType(_) => Some(FamousDefs(sema, sema.scope(node)?.krate())), + Definition::BuiltinType(_) => Some(FamousDefs(sema, sema.scope(scope_node)?.krate())), _ => None, }; render::definition(sema.db, definition, famous_defs.as_ref(), config).map(|markup| { diff --git a/crates/ide/src/hover/tests.rs b/crates/ide/src/hover/tests.rs index d3d492f3fdef0..53585624b68a3 100644 --- a/crates/ide/src/hover/tests.rs +++ b/crates/ide/src/hover/tests.rs @@ -6613,3 +6613,63 @@ fn test() { "#]], ); } + +#[test] +fn format_args_implicit() { + check( + r#" +//- minicore: fmt +fn test() { +let aaaaa = "foo"; +format_args!("{aaaaa$0}"); +} +"#, + expect![[r#" + *aaaaa* + + ```rust + let aaaaa: &str // size = 16 (0x10), align = 8, niches = 1 + ``` + "#]], + ); +} + +#[test] +fn format_args_implicit2() { + check( + r#" +//- minicore: fmt +fn test() { +let aaaaa = "foo"; +format_args!("{$0aaaaa}"); +} +"#, + expect![[r#" + *aaaaa* + + ```rust + let aaaaa: &str // size = 16 (0x10), align = 8, niches = 1 + ``` + "#]], + ); +} + +#[test] +fn format_args_implicit_raw() { + check( + r#" +//- minicore: fmt +fn test() { +let aaaaa = "foo"; +format_args!(r"{$0aaaaa}"); +} +"#, + expect![[r#" + *aaaaa* + + ```rust + let aaaaa: &str // size = 16 (0x10), align = 8, niches = 1 + ``` + "#]], + ); +} diff --git a/crates/ide/src/moniker.rs b/crates/ide/src/moniker.rs index 28d455f4e591e..8e8bb5e0139ea 100644 --- a/crates/ide/src/moniker.rs +++ b/crates/ide/src/moniker.rs @@ -99,7 +99,7 @@ pub(crate) fn moniker( }); } let navs = sema - .descend_into_macros(DescendPreference::None, original_token.clone(), offset) + .descend_into_macros(DescendPreference::None, original_token.clone()) .into_iter() .filter_map(|token| { IdentClass::classify_token(sema, &token).map(IdentClass::definitions_no_ops).map(|it| { diff --git a/crates/ide/src/references.rs b/crates/ide/src/references.rs index 2285ab199343a..b805ddfa26789 100644 --- a/crates/ide/src/references.rs +++ b/crates/ide/src/references.rs @@ -109,7 +109,7 @@ pub(crate) fn find_all_refs( } None => { let search = make_searcher(false); - Some(find_defs(sema, &syntax, position.offset)?.map(search).collect()) + Some(find_defs(sema, &syntax, position.offset)?.into_iter().map(search).collect()) } } } @@ -118,15 +118,27 @@ pub(crate) fn find_defs<'a>( sema: &'a Semantics<'_, RootDatabase>, syntax: &SyntaxNode, offset: TextSize, -) -> Option + 'a> { +) -> Option + 'a> { let token = syntax.token_at_offset(offset).find(|t| { matches!( t.kind(), - IDENT | INT_NUMBER | LIFETIME_IDENT | T![self] | T![super] | T![crate] | T![Self] + IDENT + | INT_NUMBER + | LIFETIME_IDENT + | STRING + | T![self] + | T![super] + | T![crate] + | T![Self] ) - }); - token.map(|token| { - sema.descend_into_macros(DescendPreference::SameText, token, offset) + })?; + + if let Some((_, resolution)) = sema.check_for_format_args_template(token.clone(), offset) { + return resolution.map(Definition::from).map(|it| vec![it]); + } + + Some( + sema.descend_into_macros(DescendPreference::SameText, token) .into_iter() .filter_map(|it| ast::NameLike::cast(it.parent()?)) .filter_map(move |name_like| { @@ -162,7 +174,8 @@ 
pub(crate) fn find_defs<'a>( }; Some(def) }) - }) + .collect(), + ) } pub(crate) fn decl_mutability(def: &Definition, syntax: &SyntaxNode, range: TextRange) -> bool { @@ -2092,4 +2105,27 @@ fn main() { r#fn(); } "#]], ); } + + #[test] + fn implicit_format_args() { + check( + r#" +//- minicore: fmt +fn test() { + let a = "foo"; + format_args!("hello {a} {a$0} {}", a); + // ^ + // ^ + // ^ +} +"#, + expect![[r#" + a Local FileId(0) 20..21 20..21 + + FileId(0) 56..57 Read + FileId(0) 60..61 Read + FileId(0) 68..69 Read + "#]], + ); + } } diff --git a/crates/ide/src/rename.rs b/crates/ide/src/rename.rs index ad1eb2499718f..1febfabfcb7f4 100644 --- a/crates/ide/src/rename.rs +++ b/crates/ide/src/rename.rs @@ -6,14 +6,16 @@ use hir::{AsAssocItem, HirFileIdExt, InFile, Semantics}; use ide_db::{ - base_db::FileId, + base_db::{FileId, FileRange}, defs::{Definition, NameClass, NameRefClass}, rename::{bail, format_err, source_edit_from_references, IdentifierKind}, RootDatabase, }; use itertools::Itertools; use stdx::{always, never}; -use syntax::{ast, utils::is_raw_identifier, AstNode, SmolStr, SyntaxNode, TextRange, TextSize}; +use syntax::{ + ast, utils::is_raw_identifier, AstNode, SmolStr, SyntaxKind, SyntaxNode, TextRange, TextSize, +}; use text_edit::TextEdit; @@ -34,23 +36,20 @@ pub(crate) fn prepare_rename( let syntax = source_file.syntax(); let res = find_definitions(&sema, syntax, position)? - .map(|(name_like, def)| { + .map(|(frange, kind, def)| { // ensure all ranges are valid if def.range_for_rename(&sema).is_none() { bail!("No references found at position") } - let Some(frange) = sema.original_range_opt(name_like.syntax()) else { - bail!("No references found at position"); - }; always!( frange.range.contains_inclusive(position.offset) && frange.file_id == position.file_id ); - Ok(match name_like { - ast::NameLike::Lifetime(_) => { + Ok(match kind { + SyntaxKind::LIFETIME => { TextRange::new(frange.range.start() + TextSize::from(1), frange.range.end()) } _ => frange.range, @@ -93,7 +92,7 @@ pub(crate) fn rename( let defs = find_definitions(&sema, syntax, position)?; let ops: RenameResult> = defs - .map(|(_namelike, def)| { + .map(|(.., def)| { if let Definition::Local(local) = def { if let Some(self_param) = local.as_self_param(sema.db) { cov_mark::hit!(rename_self_to_param); @@ -134,11 +133,27 @@ pub(crate) fn will_rename_file( fn find_definitions( sema: &Semantics<'_, RootDatabase>, syntax: &SyntaxNode, - position: FilePosition, -) -> RenameResult> { - let symbols = sema - .find_nodes_at_offset_with_descend::(syntax, position.offset) - .map(|name_like| { + FilePosition { file_id, offset }: FilePosition, +) -> RenameResult> { + let token = syntax.token_at_offset(offset).find(|t| matches!(t.kind(), SyntaxKind::STRING)); + + if let Some((range, Some(resolution))) = + token.and_then(|token| sema.check_for_format_args_template(token, offset)) + { + return Ok(vec![( + FileRange { file_id, range }, + SyntaxKind::STRING, + Definition::from(resolution), + )] + .into_iter()); + } + + let symbols = + sema.find_nodes_at_offset_with_descend::(syntax, offset).map(|name_like| { + let kind = name_like.syntax().kind(); + let range = sema + .original_range_opt(name_like.syntax()) + .ok_or_else(|| format_err!("No references found at position"))?; let res = match &name_like { // renaming aliases would rename the item being aliased as the HIR doesn't track aliases yet ast::NameLike::Name(name) @@ -163,7 +178,6 @@ fn find_definitions( Definition::Local(local_def) } }) - .map(|def| (name_like.clone(), def)) 
.ok_or_else(|| format_err!("No references found at position")), ast::NameLike::NameRef(name_ref) => { NameRefClass::classify(sema, name_ref) @@ -187,7 +201,7 @@ fn find_definitions( { Err(format_err!("Renaming aliases is currently unsupported")) } else { - Ok((name_like.clone(), def)) + Ok(def) } }) } @@ -203,11 +217,10 @@ fn find_definitions( _ => None, }) }) - .map(|def| (name_like, def)) .ok_or_else(|| format_err!("No references found at position")) } }; - res + res.map(|def| (range, kind, def)) }); let res: RenameResult> = symbols.collect(); @@ -218,7 +231,7 @@ fn find_definitions( Err(format_err!("No references found at position")) } else { // remove duplicates, comparing `Definition`s - Ok(v.into_iter().unique_by(|t| t.1)) + Ok(v.into_iter().unique_by(|&(.., def)| def).collect::>().into_iter()) } } Err(e) => Err(e), @@ -2663,4 +2676,44 @@ struct A; "error: Cannot rename a non-local definition.", ) } + + #[test] + fn implicit_format_args() { + check( + "fbar", + r#" +//- minicore: fmt +fn test() { + let foo = "foo"; + format_args!("hello {foo} {foo$0} {}", foo); +} +"#, + r#" +fn test() { + let fbar = "foo"; + format_args!("hello {fbar} {fbar} {}", fbar); +} +"#, + ); + } + + #[test] + fn implicit_format_args2() { + check( + "fo", + r#" +//- minicore: fmt +fn test() { + let foo = "foo"; + format_args!("hello {foo} {foo$0} {}", foo); +} +"#, + r#" +fn test() { + let fo = "foo"; + format_args!("hello {fo} {fo} {}", fo); +} +"#, + ); + } } diff --git a/crates/ide/src/runnables.rs b/crates/ide/src/runnables.rs index 954a364c78707..d487a538bb574 100644 --- a/crates/ide/src/runnables.rs +++ b/crates/ide/src/runnables.rs @@ -9,7 +9,7 @@ use ide_db::{ defs::Definition, documentation::docs_from_attrs, helpers::visit_file_defs, - search::SearchScope, + search::{FileReferenceNode, SearchScope}, FxHashMap, FxHashSet, RootDatabase, SymbolKind, }; use itertools::Itertools; @@ -240,7 +240,7 @@ fn find_related_tests( .flatten(); for ref_ in defs { let name_ref = match ref_.name { - ast::NameLike::NameRef(name_ref) => name_ref, + FileReferenceNode::NameRef(name_ref) => name_ref, _ => continue, }; if let Some(fn_def) = diff --git a/crates/ide/src/signature_help.rs b/crates/ide/src/signature_help.rs index c95f9994615d4..990376a49659d 100644 --- a/crates/ide/src/signature_help.rs +++ b/crates/ide/src/signature_help.rs @@ -82,7 +82,7 @@ pub(crate) fn signature_help( // if the cursor is sandwiched between two space tokens and the call is unclosed // this prevents us from leaving the CallExpression .and_then(|tok| algo::skip_trivia_token(tok, Direction::Prev))?; - let token = sema.descend_into_macros_single(DescendPreference::None, token, offset); + let token = sema.descend_into_macros_single(DescendPreference::None, token); for node in token.parent_ancestors() { match_ast! { diff --git a/crates/ide/src/syntax_highlighting.rs b/crates/ide/src/syntax_highlighting.rs index b2db6bb5c43f4..ac14afa132089 100644 --- a/crates/ide/src/syntax_highlighting.rs +++ b/crates/ide/src/syntax_highlighting.rs @@ -399,7 +399,6 @@ fn traverse( Some(AttrOrDerive::Derive(_)) | None => DescendPreference::None, }, token, - 0.into(), ); match token.parent().and_then(ast::NameLike::cast) { // Remap the token into the wrapping single token nodes diff --git a/crates/mbe/src/token_map.rs b/crates/mbe/src/token_map.rs index b51d7575a113f..28b39b4f1eec4 100644 --- a/crates/mbe/src/token_map.rs +++ b/crates/mbe/src/token_map.rs @@ -35,6 +35,7 @@ impl SpanMap { /// /// Note this does a linear search through the entire backing vector. 
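// NOTE: `ranges_with_span` walks the entire backing `Vec<(TextSize, S)>` and
// keeps the entries whose span equals the query; the FIXME added below
// observes that this equality is too strict, since spans that differ only in
// their hygiene context should still match. A sketch of the intended
// comparison, assuming `S` is `SpanData { range, anchor, ctx }` as elsewhere
// in this series (illustration only, not the actual fix):
//
//     fn same_site(s: &SpanData, query: &SpanData) -> bool {
//         s.anchor == query.anchor && s.range == query.range // `ctx` ignored
//     }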
pub fn ranges_with_span(&self, span: S) -> impl Iterator + '_ { + // FIXME: This should ignore the syntax context! self.spans.iter().enumerate().filter_map(move |(idx, &(end, s))| { if s != span { return None; diff --git a/crates/syntax/src/ast/token_ext.rs b/crates/syntax/src/ast/token_ext.rs index 8cc271d226c4e..d5d565a015a0d 100644 --- a/crates/syntax/src/ast/token_ext.rs +++ b/crates/syntax/src/ast/token_ext.rs @@ -121,6 +121,7 @@ impl ast::Whitespace { } } +#[derive(Debug)] pub struct QuoteOffsets { pub quotes: (TextRange, TextRange), pub contents: TextRange, @@ -167,6 +168,11 @@ pub trait IsString: AstToken { fn text_range_between_quotes(&self) -> Option { self.quote_offsets().map(|it| it.contents) } + fn text_without_quotes(&self) -> &str { + let text = self.text(); + let Some(offsets) = self.text_range_between_quotes() else { return text }; + &text[offsets - self.syntax().text_range().start()] + } fn open_quote_text_range(&self) -> Option { self.quote_offsets().map(|it| it.quotes.0) } diff --git a/crates/syntax/src/token_text.rs b/crates/syntax/src/token_text.rs index 09c080c0c2301..e69deb49ce142 100644 --- a/crates/syntax/src/token_text.rs +++ b/crates/syntax/src/token_text.rs @@ -13,7 +13,7 @@ pub(crate) enum Repr<'a> { } impl<'a> TokenText<'a> { - pub(crate) fn borrowed(text: &'a str) -> Self { + pub fn borrowed(text: &'a str) -> Self { TokenText(Repr::Borrowed(text)) } From fe0a85ca29fdb9614f10a9aa2ef52fd878d79045 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Tue, 5 Dec 2023 16:30:57 +0100 Subject: [PATCH 42/80] Resolve implicit format args in syntax highlighting --- crates/hir/src/semantics.rs | 45 ++++++++++++++----- crates/hir/src/source_analyzer.rs | 23 ++++++++++ crates/ide/src/syntax_highlighting.rs | 2 +- crates/ide/src/syntax_highlighting/format.rs | 20 ++++++++- .../ide/src/syntax_highlighting/highlight.rs | 2 +- .../test_data/highlight_macros.html | 10 +---- .../test_data/highlight_strings.html | 43 +++++++----------- crates/ide/src/syntax_highlighting/tests.rs | 36 +++++---------- crates/test-utils/src/minicore.rs | 29 ++++++++++++ 9 files changed, 136 insertions(+), 74 deletions(-) diff --git a/crates/hir/src/semantics.rs b/crates/hir/src/semantics.rs index b0e0f969d98f3..d2fd63428b431 100644 --- a/crates/hir/src/semantics.rs +++ b/crates/hir/src/semantics.rs @@ -403,17 +403,29 @@ impl<'db> SemanticsImpl<'db> { ) } - pub fn resolve_offset_in_format_args( + pub fn as_format_args_parts( &self, - string: ast::String, - offset: TextSize, - ) -> Option<(TextRange, Option)> { - debug_assert!(offset <= string.syntax().text_range().len()); - let literal = string.syntax().parent().filter(|it| it.kind() == SyntaxKind::LITERAL)?; - let format_args = ast::FormatArgsExpr::cast(literal.parent()?)?; - let source_analyzer = &self.analyze_no_infer(format_args.syntax())?; - let format_args = self.wrap_node_infile(format_args); - source_analyzer.resolve_offset_in_format_args(self.db, format_args.as_ref(), offset) + string: &ast::String, + ) -> Option)>> { + if let Some(quote) = string.open_quote_text_range() { + return self + .descend_into_macros(DescendPreference::SameText, string.syntax().clone()) + .into_iter() + .find_map(|token| { + let string = ast::String::cast(token)?; + let literal = + string.syntax().parent().filter(|it| it.kind() == SyntaxKind::LITERAL)?; + let format_args = ast::FormatArgsExpr::cast(literal.parent()?)?; + let source_analyzer = self.analyze_no_infer(format_args.syntax())?; + let format_args = self.wrap_node_infile(format_args); + let res = 
source_analyzer + .as_format_args_parts(self.db, format_args.as_ref())? + .map(|(range, res)| (range + quote.end(), res)) + .collect(); + Some(res) + }); + } + None } pub fn check_for_format_args_template( @@ -438,6 +450,19 @@ impl<'db> SemanticsImpl<'db> { None } + fn resolve_offset_in_format_args( + &self, + string: ast::String, + offset: TextSize, + ) -> Option<(TextRange, Option)> { + debug_assert!(offset <= string.syntax().text_range().len()); + let literal = string.syntax().parent().filter(|it| it.kind() == SyntaxKind::LITERAL)?; + let format_args = ast::FormatArgsExpr::cast(literal.parent()?)?; + let source_analyzer = &self.analyze_no_infer(format_args.syntax())?; + let format_args = self.wrap_node_infile(format_args); + source_analyzer.resolve_offset_in_format_args(self.db, format_args.as_ref(), offset) + } + /// Maps a node down by mapping its first and last token down. pub fn descend_node_into_attributes(&self, node: N) -> SmallVec<[N; 1]> { // This might not be the correct way to do this, but it works for now diff --git a/crates/hir/src/source_analyzer.rs b/crates/hir/src/source_analyzer.rs index 9fdee209cfbc0..8afa7e0659475 100644 --- a/crates/hir/src/source_analyzer.rs +++ b/crates/hir/src/source_analyzer.rs @@ -843,6 +843,29 @@ impl SourceAnalyzer { }) } + pub(crate) fn as_format_args_parts<'a>( + &'a self, + db: &'a dyn HirDatabase, + format_args: InFile<&ast::FormatArgsExpr>, + ) -> Option)> + 'a> { + Some(self.body_source_map()?.implicit_format_args(format_args)?.iter().map( + move |(range, name)| { + ( + *range, + resolve_hir_value_path( + db, + &self.resolver, + self.resolver.body_owner(), + &Path::from_known_path_with_no_generic(ModPath::from_segments( + PathKind::Plain, + Some(name.clone()), + )), + ), + ) + }, + )) + } + fn resolve_impl_method_or_trait_def( &self, db: &dyn HirDatabase, diff --git a/crates/ide/src/syntax_highlighting.rs b/crates/ide/src/syntax_highlighting.rs index ac14afa132089..45582f54b46c3 100644 --- a/crates/ide/src/syntax_highlighting.rs +++ b/crates/ide/src/syntax_highlighting.rs @@ -440,7 +440,7 @@ fn traverse( { continue; } - highlight_format_string(hl, &string, &expanded_string, range); + highlight_format_string(hl, sema, krate, &string, &expanded_string, range); if !string.is_raw() { highlight_escape_string(hl, &string, range.start()); diff --git a/crates/ide/src/syntax_highlighting/format.rs b/crates/ide/src/syntax_highlighting/format.rs index 2ef1315945a04..518e71454798f 100644 --- a/crates/ide/src/syntax_highlighting/format.rs +++ b/crates/ide/src/syntax_highlighting/format.rs @@ -1,14 +1,20 @@ //! Syntax highlighting for format macro strings. 
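// NOTE: this file gains the definition-aware half of format-string
// highlighting. Condensed from the hunks below plus the `semantics.rs`
// change earlier in the patch: `Semantics::as_format_args_parts` yields one
// `(TextRange, Option<PathResolution>)` per `{name}` placeholder, with the
// ranges already offset past the opening quote, and every resolved name is
// then highlighted like any other use of that definition:
//
//     if let Some(parts) = sema.as_format_args_parts(string) {
//         for (range, res) in parts {
//             if let Some(res) = res {
//                 let highlight = highlight_def(sema, krate, Definition::from(res));
//                 stack.add(HlRange { range, highlight, binding_hash: None });
//             }
//         }
//     }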
use ide_db::{ + defs::Definition, syntax_helpers::format_string::{is_format_string, lex_format_specifiers, FormatSpecifier}, SymbolKind, }; use syntax::{ast, TextRange}; -use crate::{syntax_highlighting::highlights::Highlights, HlRange, HlTag}; +use crate::{ + syntax_highlighting::{highlight::highlight_def, highlights::Highlights}, + HlRange, HlTag, +}; pub(super) fn highlight_format_string( stack: &mut Highlights, + sema: &hir::Semantics<'_, ide_db::RootDatabase>, + krate: hir::Crate, string: &ast::String, expanded_string: &ast::String, range: TextRange, @@ -27,6 +33,18 @@ pub(super) fn highlight_format_string( }); } }); + + if let Some(parts) = sema.as_format_args_parts(string) { + parts.into_iter().for_each(|(range, res)| { + if let Some(res) = res { + stack.add(HlRange { + range, + highlight: highlight_def(sema, krate, Definition::from(res)), + binding_hash: None, + }) + } + }) + } } fn highlight_format_specifier(kind: FormatSpecifier) -> Option { diff --git a/crates/ide/src/syntax_highlighting/highlight.rs b/crates/ide/src/syntax_highlighting/highlight.rs index 1bffab29cf93a..d510c11c3d5a9 100644 --- a/crates/ide/src/syntax_highlighting/highlight.rs +++ b/crates/ide/src/syntax_highlighting/highlight.rs @@ -348,7 +348,7 @@ fn calc_binding_hash(name: &hir::Name, shadow_count: u32) -> u64 { hash((name, shadow_count)) } -fn highlight_def( +pub(super) fn highlight_def( sema: &Semantics<'_, RootDatabase>, krate: hir::Crate, def: Definition, diff --git a/crates/ide/src/syntax_highlighting/test_data/highlight_macros.html b/crates/ide/src/syntax_highlighting/test_data/highlight_macros.html index 06b66b302ae02..b8d38a60fc016 100644 --- a/crates/ide/src/syntax_highlighting/test_data/highlight_macros.html +++ b/crates/ide/src/syntax_highlighting/test_data/highlight_macros.html @@ -90,17 +90,11 @@ } } -#[rustc_builtin_macro] -macro_rules! concat {} -#[rustc_builtin_macro] -macro_rules! include {} -#[rustc_builtin_macro] -macro_rules! format_args {} -include!(concat!("foo/", "foo.rs")); +include!(concat!("foo/", "foo.rs")); fn main() { - format_args!("Hello, {}!", 92); + format_args!("Hello, {}!", 92); dont_color_me_braces!(); noop!(noop!(1)); } diff --git a/crates/ide/src/syntax_highlighting/test_data/highlight_strings.html b/crates/ide/src/syntax_highlighting/test_data/highlight_strings.html index aa79cd9b829f7..b40295684df9f 100644 --- a/crates/ide/src/syntax_highlighting/test_data/highlight_strings.html +++ b/crates/ide/src/syntax_highlighting/test_data/highlight_strings.html @@ -48,42 +48,29 @@ $crate::io::_print(format_args_nl!($($arg)*)); }) } -#[rustc_builtin_macro] -#[macro_export] -macro_rules! format_args_nl {} mod panic { pub macro panic_2015 { () => ( - $crate::panicking::panic("explicit panic") + panic("explicit panic") ), ($msg:literal $(,)?) => ( - $crate::panicking::panic($msg) + panic($msg) ), // Use `panic_str` instead of `panic_display::<&str>` for non_fmt_panic lint. ($msg:expr $(,)?) => ( - $crate::panicking::panic_str($msg) + panic_str($msg) ), // Special-case the single-argument case for const_panic. ("{}", $arg:expr $(,)?) => ( - $crate::panicking::panic_display(&$arg) + panic_display(&$arg) ), ($fmt:expr, $($arg:tt)+) => ( - $crate::panicking::panic_fmt(const_format_args!($fmt, $($arg)+)) + panic_fmt(const_format_args!($fmt, $($arg)+)) ), } } -#[rustc_builtin_macro(std_panic)] -#[macro_export] -macro_rules! panic {} -#[rustc_builtin_macro] -macro_rules! assert {} -#[rustc_builtin_macro] -macro_rules! asm {} -#[rustc_builtin_macro] -macro_rules! concat {} - macro_rules! 
toho { () => ($crate::panic!("not yet implemented")); ($($arg:tt)+) => ($crate::panic!("not yet implemented: {}", format_args!($($arg)+))); @@ -165,20 +152,22 @@ println!("{ничоси}", ничоси = 92); println!("{:x?} {} ", thingy, n2); - panic!("{}", 0); - panic!("more {}", 1); - assert!(true, "{}", 1); - assert!(true, "{} asdasd", 1); + panic!("{}", 0); + panic!("more {}", 1); + assert!(true, "{}", 1); + assert!(true, "{} asdasd", 1); toho!("{}fmt", 0); let i: u64 = 3; let o: u64; - asm!( - "mov {0}, {1}", - "add {0}, 5", + asm!( + "mov {0}, {1}", + "add {0}, 5", out(reg) o, in(reg) i, ); - format_args!(concat!("{}"), "{}"); - format_args!("{} {} {} {} {} {}", backslash, format_args!("{}", 0), foo, "bar", toho!(), backslash); + const CONSTANT: () = (): + let mut m = (); + format_args!(concat!("{}"), "{}"); + format_args!("{} {} {} {} {} {} {backslash} {CONSTANT} {m}", backslash, format_args!("{}", 0), foo, "bar", toho!(), backslash); } \ No newline at end of file diff --git a/crates/ide/src/syntax_highlighting/tests.rs b/crates/ide/src/syntax_highlighting/tests.rs index 542d8992531f5..935b6b2cb9b16 100644 --- a/crates/ide/src/syntax_highlighting/tests.rs +++ b/crates/ide/src/syntax_highlighting/tests.rs @@ -48,6 +48,7 @@ fn macros() { check_highlighting( r#" //- proc_macros: mirror +//- minicore: fmt, include, concat //- /lib.rs crate:lib proc_macros::mirror! { { @@ -96,12 +97,6 @@ macro without_args { } } -#[rustc_builtin_macro] -macro_rules! concat {} -#[rustc_builtin_macro] -macro_rules! include {} -#[rustc_builtin_macro] -macro_rules! format_args {} include!(concat!("foo/", "foo.rs")); @@ -401,48 +396,35 @@ fn test_string_highlighting() { // thus, we have to copy the macro definition from `std` check_highlighting( r#" -//- minicore: fmt +//- minicore: fmt, assert, asm, concat, panic macro_rules! println { ($($arg:tt)*) => ({ $crate::io::_print(format_args_nl!($($arg)*)); }) } -#[rustc_builtin_macro] -#[macro_export] -macro_rules! format_args_nl {} mod panic { pub macro panic_2015 { () => ( - $crate::panicking::panic("explicit panic") + panic("explicit panic") ), ($msg:literal $(,)?) => ( - $crate::panicking::panic($msg) + panic($msg) ), // Use `panic_str` instead of `panic_display::<&str>` for non_fmt_panic lint. ($msg:expr $(,)?) => ( - $crate::panicking::panic_str($msg) + panic_str($msg) ), // Special-case the single-argument case for const_panic. ("{}", $arg:expr $(,)?) => ( - $crate::panicking::panic_display(&$arg) + panic_display(&$arg) ), ($fmt:expr, $($arg:tt)+) => ( - $crate::panicking::panic_fmt(const_format_args!($fmt, $($arg)+)) + panic_fmt(const_format_args!($fmt, $($arg)+)) ), } } -#[rustc_builtin_macro(std_panic)] -#[macro_export] -macro_rules! panic {} -#[rustc_builtin_macro] -macro_rules! assert {} -#[rustc_builtin_macro] -macro_rules! asm {} -#[rustc_builtin_macro] -macro_rules! concat {} - macro_rules! 
toho { () => ($crate::panic!("not yet implemented")); ($($arg:tt)+) => ($crate::panic!("not yet implemented: {}", format_args!($($arg)+))); @@ -538,8 +520,10 @@ fn main() { in(reg) i, ); + const CONSTANT: () = (): + let mut m = (); format_args!(concat!("{}"), "{}"); - format_args!("{} {} {} {} {} {}", backslash, format_args!("{}", 0), foo, "bar", toho!(), backslash); + format_args!("{} {} {} {} {} {} {backslash} {CONSTANT} {m}", backslash, format_args!("{}", 0), foo, "bar", toho!(), backslash); }"#, expect_file!["./test_data/highlight_strings.html"], false, diff --git a/crates/test-utils/src/minicore.rs b/crates/test-utils/src/minicore.rs index d39d62f3620d2..f766747d707ca 100644 --- a/crates/test-utils/src/minicore.rs +++ b/crates/test-utils/src/minicore.rs @@ -9,6 +9,8 @@ //! //! Available flags: //! add: +//! asm: +//! assert: //! as_ref: sized //! bool_impl: option, fn //! builtin_impls: @@ -1366,6 +1368,26 @@ mod macros { } // endregion:panic + // region:asm + #[macro_export] + #[rustc_builtin_macro] + macro_rules! asm { + ($($arg:tt)*) => { + /* compiler built-in */ + }; + } + // endregion:asm + + // region:assert + #[macro_export] + #[rustc_builtin_macro] + macro_rules! assert { + ($($arg:tt)*) => { + /* compiler built-in */ + }; + } + // endregion:assert + // region:fmt #[macro_export] #[rustc_builtin_macro] @@ -1381,6 +1403,13 @@ mod macros { ($fmt:expr, $($args:tt)*) => {{ /* compiler built-in */ }}; } + #[macro_export] + #[rustc_builtin_macro] + macro_rules! format_args_nl { + ($fmt:expr) => {{ /* compiler built-in */ }}; + ($fmt:expr, $($args:tt)*) => {{ /* compiler built-in */ }}; + } + #[macro_export] macro_rules! print { ($($arg:tt)*) => {{ From 4525787ed5275c036260340116f0408344a44c74 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Tue, 5 Dec 2023 16:32:49 +0100 Subject: [PATCH 43/80] Add test for implicit format args support through nested macro call --- crates/ide/src/hover/tests.rs | 25 +++++++++++++++++++++++++ 1 file changed, 25 insertions(+) diff --git a/crates/ide/src/hover/tests.rs b/crates/ide/src/hover/tests.rs index 53585624b68a3..8c9d58671e698 100644 --- a/crates/ide/src/hover/tests.rs +++ b/crates/ide/src/hover/tests.rs @@ -6673,3 +6673,28 @@ format_args!(r"{$0aaaaa}"); "#]], ); } + +#[test] +fn format_args_implicit_nested() { + check( + r#" +//- minicore: fmt +macro_rules! 
foo { + ($($tt:tt)*) => { + format_args!($($tt)*) + } +} +fn test() { +let aaaaa = "foo"; +foo!(r"{$0aaaaa}"); +} +"#, + expect![[r#" + *aaaaa* + + ```rust + let aaaaa: &str // size = 16 (0x10), align = 8, niches = 1 + ``` + "#]], + ); +} From 9b7ec5e31be75ed49b2cc7f341fd93ca6999a303 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Tue, 5 Dec 2023 17:04:59 +0100 Subject: [PATCH 44/80] Ignore strings in token trees in syntax highlighting --- crates/hir/src/semantics.rs | 2 +- crates/ide/src/syntax_highlighting.rs | 26 ++++++++++++++----- .../test_data/highlight_strings.html | 5 ++++ crates/ide/src/syntax_highlighting/tests.rs | 5 ++++ 4 files changed, 30 insertions(+), 8 deletions(-) diff --git a/crates/hir/src/semantics.rs b/crates/hir/src/semantics.rs index d2fd63428b431..ac70c27785c52 100644 --- a/crates/hir/src/semantics.rs +++ b/crates/hir/src/semantics.rs @@ -606,8 +606,8 @@ impl<'db> SemanticsImpl<'db> { } Dp::None => true, }; + res = value; if is_a_match { - res = value; ControlFlow::Break(()) } else { ControlFlow::Continue(()) diff --git a/crates/ide/src/syntax_highlighting.rs b/crates/ide/src/syntax_highlighting.rs index 45582f54b46c3..366a3c969f933 100644 --- a/crates/ide/src/syntax_highlighting.rs +++ b/crates/ide/src/syntax_highlighting.rs @@ -393,13 +393,25 @@ fn traverse( // Attempt to descend tokens into macro-calls. let res = match element { NodeOrToken::Token(token) if token.kind() != COMMENT => { - let token = sema.descend_into_macros_single( - match attr_or_derive_item { - Some(AttrOrDerive::Attr(_)) => DescendPreference::SameKind, - Some(AttrOrDerive::Derive(_)) | None => DescendPreference::None, - }, - token, - ); + let token = if token.kind() == STRING { + // for strings, try to prefer a string that has not been lost in a token + // tree + // FIXME: This should be done for everything, but check perf first + sema.descend_into_macros(DescendPreference::SameKind, token) + .into_iter() + .max_by_key(|it| { + it.parent().map_or(false, |it| it.kind() != TOKEN_TREE) + }) + .unwrap() + } else { + sema.descend_into_macros_single( + match attr_or_derive_item { + Some(AttrOrDerive::Attr(_)) => DescendPreference::SameKind, + Some(AttrOrDerive::Derive(_)) | None => DescendPreference::None, + }, + token, + ) + }; match token.parent().and_then(ast::NameLike::cast) { // Remap the token into the wrapping single token nodes Some(parent) => match (token.kind(), parent.syntax().kind()) { diff --git a/crates/ide/src/syntax_highlighting/test_data/highlight_strings.html b/crates/ide/src/syntax_highlighting/test_data/highlight_strings.html index b40295684df9f..75cb6223e0e62 100644 --- a/crates/ide/src/syntax_highlighting/test_data/highlight_strings.html +++ b/crates/ide/src/syntax_highlighting/test_data/highlight_strings.html @@ -76,6 +76,10 @@ ($($arg:tt)+) => ($crate::panic!("not yet implemented: {}", format_args!($($arg)+))); } +macro_rules! 
reuse_twice { + ($literal:literal) => {{stringify!($literal); format_args!($literal)}}; +} + fn main() { let a = '\n'; let a = '\t'; @@ -170,4 +174,5 @@ let mut m = (); format_args!(concat!("{}"), "{}"); format_args!("{} {} {} {} {} {} {backslash} {CONSTANT} {m}", backslash, format_args!("{}", 0), foo, "bar", toho!(), backslash); + reuse_twice!("{backslash}"); } \ No newline at end of file diff --git a/crates/ide/src/syntax_highlighting/tests.rs b/crates/ide/src/syntax_highlighting/tests.rs index 935b6b2cb9b16..fcfd3c92571b5 100644 --- a/crates/ide/src/syntax_highlighting/tests.rs +++ b/crates/ide/src/syntax_highlighting/tests.rs @@ -430,6 +430,10 @@ macro_rules! toho { ($($arg:tt)+) => ($crate::panic!("not yet implemented: {}", format_args!($($arg)+))); } +macro_rules! reuse_twice { + ($literal:literal) => {{stringify!($literal); format_args!($literal)}}; +} + fn main() { let a = '\n'; let a = '\t'; @@ -524,6 +528,7 @@ fn main() { let mut m = (); format_args!(concat!("{}"), "{}"); format_args!("{} {} {} {} {} {} {backslash} {CONSTANT} {m}", backslash, format_args!("{}", 0), foo, "bar", toho!(), backslash); + reuse_twice!("{backslash}"); }"#, expect_file!["./test_data/highlight_strings.html"], false, From 9cb13b6efb2578dedb8521cf9442c56df50f8d0b Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Wed, 6 Dec 2023 11:53:28 +0100 Subject: [PATCH 45/80] Allow navigation targets to be duplicated when the focus range lies in the macro definition site --- Cargo.lock | 1 + crates/base-db/src/fixture.rs | 7 +- crates/base-db/src/input.rs | 20 +- crates/base-db/src/span.rs | 20 +- crates/hir-def/src/attr/tests.rs | 2 +- crates/hir-def/src/generics.rs | 2 +- crates/hir-expand/src/files.rs | 34 + crates/hir-expand/src/fixup.rs | 12 +- crates/hir-expand/src/hygiene.rs | 6 + crates/hir-expand/src/lib.rs | 15 + crates/hir-expand/src/quote.rs | 4 +- crates/hir/src/attrs.rs | 2 +- crates/hir/src/lib.rs | 1 + crates/hir/src/symbols.rs | 10 +- .../ide-db/src/test_data/test_doc_alias.txt | 70 +- .../test_symbol_index_collection.txt | 290 +++++---- crates/ide/Cargo.toml | 1 + crates/ide/src/call_hierarchy.rs | 10 +- crates/ide/src/doc_links/tests.rs | 8 +- crates/ide/src/goto_declaration.rs | 1 + crates/ide/src/goto_definition.rs | 18 +- crates/ide/src/goto_implementation.rs | 8 +- crates/ide/src/goto_type_definition.rs | 8 +- crates/ide/src/highlight_related.rs | 36 +- crates/ide/src/hover.rs | 21 +- crates/ide/src/inlay_hints.rs | 1 + crates/ide/src/lib.rs | 5 +- crates/ide/src/navigation_target.rs | 616 +++++++++++------- crates/ide/src/parent_module.rs | 4 +- crates/ide/src/references.rs | 51 +- crates/ide/src/runnables.rs | 12 +- crates/ide/src/static_index.rs | 8 +- crates/proc-macro-api/src/msg.rs | 6 +- crates/project-model/src/tests.rs | 4 +- crates/rust-analyzer/src/handlers/request.rs | 2 +- crates/rust-analyzer/src/lsp/to_proto.rs | 2 +- crates/syntax/src/ptr.rs | 7 +- crates/vfs/src/lib.rs | 14 +- 38 files changed, 851 insertions(+), 488 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 5ba5c15a1d3d4..876ba2546a38f 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -608,6 +608,7 @@ dependencies = [ name = "ide" version = "0.0.0" dependencies = [ + "arrayvec", "cfg", "cov-mark", "crossbeam-channel", diff --git a/crates/base-db/src/fixture.rs b/crates/base-db/src/fixture.rs index cfba01a032984..bfdd21555f0aa 100644 --- a/crates/base-db/src/fixture.rs +++ b/crates/base-db/src/fixture.rs @@ -135,7 +135,7 @@ impl ChangeFixture { let mut file_set = FileSet::default(); let mut current_source_root_kind = 
SourceRootKind::Local; - let mut file_id = FileId(0); + let mut file_id = FileId::from_raw(0); let mut roots = Vec::new(); let mut file_position = None; @@ -210,7 +210,7 @@ impl ChangeFixture { let path = VfsPath::new_virtual_path(meta.path); file_set.insert(file_id, path); files.push(file_id); - file_id.0 += 1; + file_id = FileId::from_raw(file_id.index() + 1); } if crates.is_empty() { @@ -255,7 +255,7 @@ impl ChangeFixture { if let Some(mini_core) = mini_core { let core_file = file_id; - file_id.0 += 1; + file_id = FileId::from_raw(file_id.index() + 1); let mut fs = FileSet::default(); fs.insert(core_file, VfsPath::new_virtual_path("/sysroot/core/lib.rs".to_string())); @@ -296,7 +296,6 @@ impl ChangeFixture { let mut proc_macros = ProcMacros::default(); if !proc_macro_names.is_empty() { let proc_lib_file = file_id; - file_id.0 += 1; proc_macro_defs.extend(default_test_proc_macros()); let (proc_macro, source) = filter_test_proc_macros(&proc_macro_names, proc_macro_defs); diff --git a/crates/base-db/src/input.rs b/crates/base-db/src/input.rs index 12b449932dd73..c2472363aacd3 100644 --- a/crates/base-db/src/input.rs +++ b/crates/base-db/src/input.rs @@ -880,7 +880,7 @@ mod tests { fn detect_cyclic_dependency_indirect() { let mut graph = CrateGraph::default(); let crate1 = graph.add_crate_root( - FileId(1u32), + FileId::from_raw(1u32), Edition2018, None, None, @@ -893,7 +893,7 @@ mod tests { None, ); let crate2 = graph.add_crate_root( - FileId(2u32), + FileId::from_raw(2u32), Edition2018, None, None, @@ -906,7 +906,7 @@ mod tests { None, ); let crate3 = graph.add_crate_root( - FileId(3u32), + FileId::from_raw(3u32), Edition2018, None, None, @@ -942,7 +942,7 @@ mod tests { fn detect_cyclic_dependency_direct() { let mut graph = CrateGraph::default(); let crate1 = graph.add_crate_root( - FileId(1u32), + FileId::from_raw(1u32), Edition2018, None, None, @@ -955,7 +955,7 @@ mod tests { None, ); let crate2 = graph.add_crate_root( - FileId(2u32), + FileId::from_raw(2u32), Edition2018, None, None, @@ -985,7 +985,7 @@ mod tests { fn it_works() { let mut graph = CrateGraph::default(); let crate1 = graph.add_crate_root( - FileId(1u32), + FileId::from_raw(1u32), Edition2018, None, None, @@ -998,7 +998,7 @@ mod tests { None, ); let crate2 = graph.add_crate_root( - FileId(2u32), + FileId::from_raw(2u32), Edition2018, None, None, @@ -1011,7 +1011,7 @@ mod tests { None, ); let crate3 = graph.add_crate_root( - FileId(3u32), + FileId::from_raw(3u32), Edition2018, None, None, @@ -1041,7 +1041,7 @@ mod tests { fn dashes_are_normalized() { let mut graph = CrateGraph::default(); let crate1 = graph.add_crate_root( - FileId(1u32), + FileId::from_raw(1u32), Edition2018, None, None, @@ -1054,7 +1054,7 @@ mod tests { None, ); let crate2 = graph.add_crate_root( - FileId(2u32), + FileId::from_raw(2u32), Edition2018, None, None, diff --git a/crates/base-db/src/span.rs b/crates/base-db/src/span.rs index 6723bf97fea58..3464f4cb6d1c7 100644 --- a/crates/base-db/src/span.rs +++ b/crates/base-db/src/span.rs @@ -70,7 +70,7 @@ impl fmt::Debug for SpanAnchor { } impl tt::SpanAnchor for SpanAnchor { - const DUMMY: Self = SpanAnchor { file_id: FileId(0), ast_id: ROOT_ERASED_FILE_AST_ID }; + const DUMMY: Self = SpanAnchor { file_id: FileId::BOGUS, ast_id: ROOT_ERASED_FILE_AST_ID }; } /// Input to the analyzer is a set of files, where each file is identified by @@ -99,12 +99,6 @@ impl From for u32 { } } -impl From for HirFileId { - fn from(value: u32) -> Self { - HirFileId(value) - } -} - impl From for HirFileId { fn from(value: 
MacroCallId) -> Self { value.as_file() @@ -147,7 +141,7 @@ pub enum HirFileIdRepr { impl fmt::Debug for HirFileIdRepr { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { - Self::FileId(arg0) => f.debug_tuple("FileId").field(&arg0.0).finish(), + Self::FileId(arg0) => f.debug_tuple("FileId").field(&arg0.index()).finish(), Self::MacroFile(arg0) => { f.debug_tuple("MacroFile").field(&arg0.macro_call_id.0).finish() } @@ -156,9 +150,9 @@ impl fmt::Debug for HirFileIdRepr { } impl From for HirFileId { - fn from(FileId(id): FileId) -> Self { - assert!(id < Self::MAX_FILE_ID); - HirFileId(id) + fn from(id: FileId) -> Self { + assert!(id.index() < Self::MAX_FILE_ID); + HirFileId(id.index()) } } @@ -192,7 +186,7 @@ impl HirFileId { #[inline] pub fn file_id(self) -> Option { match self.0 & Self::MACRO_FILE_TAG_MASK { - 0 => Some(FileId(self.0)), + 0 => Some(FileId::from_raw(self.0)), _ => None, } } @@ -200,7 +194,7 @@ impl HirFileId { #[inline] pub fn repr(self) -> HirFileIdRepr { match self.0 & Self::MACRO_FILE_TAG_MASK { - 0 => HirFileIdRepr::FileId(FileId(self.0)), + 0 => HirFileIdRepr::FileId(FileId::from_raw(self.0)), _ => HirFileIdRepr::MacroFile(MacroFileId { macro_call_id: MacroCallId(InternId::from(self.0 ^ Self::MACRO_FILE_TAG_MASK)), }), diff --git a/crates/hir-def/src/attr/tests.rs b/crates/hir-def/src/attr/tests.rs index 796f165c7c876..0f98a4ec93c63 100644 --- a/crates/hir-def/src/attr/tests.rs +++ b/crates/hir-def/src/attr/tests.rs @@ -13,7 +13,7 @@ fn assert_parse_result(input: &str, expected: DocExpr) { let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap(); let tt = syntax_node_to_token_tree( tt.syntax(), - SpanMapRef::RealSpanMap(&RealSpanMap::absolute(FileId(0))), + SpanMapRef::RealSpanMap(&RealSpanMap::absolute(FileId::from_raw(0))), ); let cfg = DocExpr::parse(&tt); assert_eq!(cfg, expected); diff --git a/crates/hir-def/src/generics.rs b/crates/hir-def/src/generics.rs index 1c1c481a8eedc..0d95d916ff99b 100644 --- a/crates/hir-def/src/generics.rs +++ b/crates/hir-def/src/generics.rs @@ -524,7 +524,7 @@ fn file_id_and_params_of( (src.file_id, src.value.generic_param_list()) } // We won't be using this ID anyway - GenericDefId::EnumVariantId(_) | GenericDefId::ConstId(_) => (FileId(!0).into(), None), + GenericDefId::EnumVariantId(_) | GenericDefId::ConstId(_) => (FileId::BOGUS.into(), None), } } diff --git a/crates/hir-expand/src/files.rs b/crates/hir-expand/src/files.rs index ed8639d7a1d94..174e59056981b 100644 --- a/crates/hir-expand/src/files.rs +++ b/crates/hir-expand/src/files.rs @@ -307,6 +307,40 @@ impl InFile { }; range } + + pub fn original_node_file_range( + self, + db: &dyn db::ExpandDatabase, + ) -> (FileRange, SyntaxContextId) { + match self.file_id.repr() { + HirFileIdRepr::FileId(file_id) => { + (FileRange { file_id, range: self.value }, SyntaxContextId::ROOT) + } + HirFileIdRepr::MacroFile(mac_file) => { + match ExpansionInfo::new(db, mac_file).map_node_range_up(db, self.value) { + Some(it) => it, + None => { + let loc = db.lookup_intern_macro_call(mac_file.macro_call_id); + (loc.kind.original_call_range(db), SyntaxContextId::ROOT) + } + } + } + } + } + + pub fn original_node_file_range_opt( + self, + db: &dyn db::ExpandDatabase, + ) -> Option<(FileRange, SyntaxContextId)> { + match self.file_id.repr() { + HirFileIdRepr::FileId(file_id) => { + Some((FileRange { file_id, range: self.value }, SyntaxContextId::ROOT)) + } + HirFileIdRepr::MacroFile(mac_file) => { + ExpansionInfo::new(db, mac_file).map_node_range_up(db, 
self.value) + } + } + } } impl InFile { diff --git a/crates/hir-expand/src/fixup.rs b/crates/hir-expand/src/fixup.rs index 77ddf7a48c306..11775c531d4cd 100644 --- a/crates/hir-expand/src/fixup.rs +++ b/crates/hir-expand/src/fixup.rs @@ -54,8 +54,10 @@ pub(crate) fn fixup_syntax(span_map: SpanMapRef<'_>, node: &SyntaxNode) -> Synta let dummy_range = TextRange::empty(TextSize::new(0)); // we use a file id of `FileId(!0)` to signal a fake node, and the text range's start offset as // the index into the replacement vec but only if the end points to !0 - let dummy_anchor = - SpanAnchor { file_id: FileId(!0), ast_id: ErasedFileAstId::from_raw(RawIdx::from(!0)) }; + let dummy_anchor = SpanAnchor { + file_id: FileId::from_raw(!0), + ast_id: ErasedFileAstId::from_raw(RawIdx::from(!0)), + }; let fake_span = |range| SpanData { range: dummy_range, anchor: dummy_anchor, @@ -308,7 +310,7 @@ fn reverse_fixups_(tt: &mut Subtree, undo_info: &[Subtree]) { .filter(|tt| match tt { tt::TokenTree::Leaf(leaf) => { let span = leaf.span(); - span.anchor.file_id != FileId(!0) || span.range.end() == TextSize::new(!0) + span.anchor.file_id != FileId::from_raw(!0) || span.range.end() == TextSize::new(!0) } tt::TokenTree::Subtree(_) => true, }) @@ -318,7 +320,7 @@ fn reverse_fixups_(tt: &mut Subtree, undo_info: &[Subtree]) { SmallVec::from_const([tt.into()]) } tt::TokenTree::Leaf(leaf) => { - if leaf.span().anchor.file_id == FileId(!0) { + if leaf.span().anchor.file_id == FileId::from_raw(!0) { let original = undo_info[u32::from(leaf.span().range.start()) as usize].clone(); if original.delimiter.kind == tt::DelimiterKind::Invisible { original.token_trees.into() @@ -373,7 +375,7 @@ mod tests { #[track_caller] fn check(ra_fixture: &str, mut expect: Expect) { let parsed = syntax::SourceFile::parse(ra_fixture); - let span_map = SpanMap::RealSpanMap(Arc::new(RealSpanMap::absolute(FileId(0)))); + let span_map = SpanMap::RealSpanMap(Arc::new(RealSpanMap::absolute(FileId::from_raw(0)))); let fixups = super::fixup_syntax(span_map.as_ref(), &parsed.syntax_node()); let mut tt = mbe::syntax_node_to_token_tree_modified( &parsed.syntax_node(), diff --git a/crates/hir-expand/src/hygiene.rs b/crates/hir-expand/src/hygiene.rs index a809e92d62221..7b03709aced00 100644 --- a/crates/hir-expand/src/hygiene.rs +++ b/crates/hir-expand/src/hygiene.rs @@ -197,6 +197,7 @@ pub trait SyntaxContextExt { fn normalize_to_macro_rules(self, db: &dyn ExpandDatabase) -> Self; fn normalize_to_macros_2_0(self, db: &dyn ExpandDatabase) -> Self; fn parent_ctxt(self, db: &dyn ExpandDatabase) -> Self; + fn remove_mark(&mut self, db: &dyn ExpandDatabase) -> (Option, Transparency); fn outer_mark(self, db: &dyn ExpandDatabase) -> (Option, Transparency); fn marks(self, db: &dyn ExpandDatabase) -> Vec<(Option, Transparency)>; } @@ -223,6 +224,11 @@ impl SyntaxContextExt for SyntaxContextId { let data = db.lookup_intern_syntax_context(self); (data.outer_expn, data.outer_transparency) } + fn remove_mark(&mut self, db: &dyn ExpandDatabase) -> (Option, Transparency) { + let data = db.lookup_intern_syntax_context(*self); + *self = data.parent; + (data.outer_expn, data.outer_transparency) + } fn marks(self, db: &dyn ExpandDatabase) -> Vec<(Option, Transparency)> { let mut marks = marks_rev(self, db).collect::>(); marks.reverse(); diff --git a/crates/hir-expand/src/lib.rs b/crates/hir-expand/src/lib.rs index 71c98b2770aa3..167ba0eb5da1c 100644 --- a/crates/hir-expand/src/lib.rs +++ b/crates/hir-expand/src/lib.rs @@ -380,6 +380,21 @@ impl MacroDefId { 
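// NOTE: `definition_range` below serves this patch's headline change:
// navigation targets may now come in pairs, one for the macro call site and
// one for the macro definition site, which is why the ide crates in this
// patch switch to `nav.call_site` / `nav.def_site` and `.flatten()` over
// `try_to_nav` results. The carrier type itself is not shown in this
// section; a plausible shape (the name is hypothetical), inferred purely
// from those call sites:
//
//     pub struct UpmappingResult<T> {
//         /// the location remapped to the macro call site (always present)
//         pub call_site: T,
//         /// the location inside the macro definition, if distinct
//         pub def_site: Option<T>,
//     }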
db.intern_macro_call(MacroCallLoc { def: self, krate, eager: None, kind, call_site }) } + pub fn definition_range(&self, db: &dyn db::ExpandDatabase) -> InFile { + match self.kind { + MacroDefKind::Declarative(id) + | MacroDefKind::BuiltIn(_, id) + | MacroDefKind::BuiltInAttr(_, id) + | MacroDefKind::BuiltInDerive(_, id) + | MacroDefKind::BuiltInEager(_, id) => { + id.with_value(db.ast_id_map(id.file_id).get(id.value).text_range()) + } + MacroDefKind::ProcMacro(_, _, id) => { + id.with_value(db.ast_id_map(id.file_id).get(id.value).text_range()) + } + } + } + pub fn ast_id(&self) -> Either, AstId> { match self.kind { MacroDefKind::ProcMacro(.., id) => return Either::Right(id), diff --git a/crates/hir-expand/src/quote.rs b/crates/hir-expand/src/quote.rs index 0950f5d287555..acbde26c8ddf0 100644 --- a/crates/hir-expand/src/quote.rs +++ b/crates/hir-expand/src/quote.rs @@ -261,8 +261,8 @@ mod tests { assert_eq!(quoted.to_string(), "hello"); let t = format!("{quoted:?}"); expect![[r#" - SUBTREE $$ SpanData { range: 0..0, anchor: SpanAnchor(FileId(4294967295), 0), ctx: SyntaxContextId(0) } SpanData { range: 0..0, anchor: SpanAnchor(FileId(4294967295), 0), ctx: SyntaxContextId(0) } - IDENT hello SpanData { range: 0..0, anchor: SpanAnchor(FileId(4294967295), 0), ctx: SyntaxContextId(0) }"#]].assert_eq(&t); + SUBTREE $$ SpanData { range: 0..0, anchor: SpanAnchor(FileId(937550), 0), ctx: SyntaxContextId(0) } SpanData { range: 0..0, anchor: SpanAnchor(FileId(937550), 0), ctx: SyntaxContextId(0) } + IDENT hello SpanData { range: 0..0, anchor: SpanAnchor(FileId(937550), 0), ctx: SyntaxContextId(0) }"#]].assert_eq(&t); } #[test] diff --git a/crates/hir/src/attrs.rs b/crates/hir/src/attrs.rs index 0ac1db9311b1d..185853353181d 100644 --- a/crates/hir/src/attrs.rs +++ b/crates/hir/src/attrs.rs @@ -241,7 +241,7 @@ fn modpath_from_str(db: &dyn HirDatabase, link: &str) -> Option { ModPath::from_src( db.upcast(), ast_path, - SpanMapRef::RealSpanMap(&RealSpanMap::absolute(FileId(0))), + SpanMapRef::RealSpanMap(&RealSpanMap::absolute(FileId::BOGUS)), ) }; diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs index 53e60c5862ad6..7e210740eac0c 100644 --- a/crates/hir/src/lib.rs +++ b/crates/hir/src/lib.rs @@ -125,6 +125,7 @@ pub use { }, hir_expand::{ attrs::{Attr, AttrId}, + hygiene::{marks_rev, SyntaxContextExt}, name::{known, Name}, tt, ExpandResult, HirFileId, HirFileIdExt, InFile, InMacroFile, InRealFile, MacroFileId, }, diff --git a/crates/hir/src/symbols.rs b/crates/hir/src/symbols.rs index a392070fd88e2..2b03d575cb277 100644 --- a/crates/hir/src/symbols.rs +++ b/crates/hir/src/symbols.rs @@ -9,7 +9,7 @@ use hir_def::{ }; use hir_expand::{HirFileId, InFile}; use hir_ty::db::HirDatabase; -use syntax::{ast::HasName, AstNode, SmolStr, SyntaxNode, SyntaxNodePtr}; +use syntax::{ast::HasName, AstNode, AstPtr, SmolStr, SyntaxNode, SyntaxNodePtr}; use crate::{Module, ModuleDef, Semantics}; @@ -32,7 +32,7 @@ pub struct DeclarationLocation { /// This points to the whole syntax node of the declaration. pub ptr: SyntaxNodePtr, /// This points to the [`syntax::ast::Name`] identifier of the declaration. 
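// NOTE: `name_ptr` switches from an untyped `SyntaxNodePtr` to a typed
// `AstPtr<ast::Name>` (matching the doc comment above), so consumers recover
// an `ast::Name` directly instead of casting. A sketch of the difference at
// a use site, assuming a `root: SyntaxNode` to resolve against:
//
//     // before: recover an untyped node, then hope the cast succeeds
//     let name = ast::Name::cast(name_ptr.to_node(&root))?;
//     // after: the pointer itself carries the AST type
//     let name: ast::Name = name_ptr.to_node(&root);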
- pub name_ptr: SyntaxNodePtr, + pub name_ptr: AstPtr, } impl DeclarationLocation { @@ -185,7 +185,7 @@ impl<'a> SymbolCollector<'a> { let dec_loc = DeclarationLocation { hir_file_id: source.file_id, ptr: SyntaxNodePtr::new(use_tree_src.syntax()), - name_ptr: SyntaxNodePtr::new(name.syntax()), + name_ptr: AstPtr::new(&name), }; self.symbols.push(FileSymbol { @@ -289,7 +289,7 @@ impl<'a> SymbolCollector<'a> { let dec_loc = DeclarationLocation { hir_file_id: source.file_id, ptr: SyntaxNodePtr::new(source.value.syntax()), - name_ptr: SyntaxNodePtr::new(name_node.syntax()), + name_ptr: AstPtr::new(&name_node), }; if let Some(attrs) = def.attrs(self.db) { @@ -322,7 +322,7 @@ impl<'a> SymbolCollector<'a> { let dec_loc = DeclarationLocation { hir_file_id: declaration.file_id, ptr: SyntaxNodePtr::new(module.syntax()), - name_ptr: SyntaxNodePtr::new(name_node.syntax()), + name_ptr: AstPtr::new(&name_node), }; let def = ModuleDef::Module(module_id.into()); diff --git a/crates/ide-db/src/test_data/test_doc_alias.txt b/crates/ide-db/src/test_data/test_doc_alias.txt index 72a6eb5eabd14..4a72881fe5e4a 100644 --- a/crates/ide-db/src/test_data/test_doc_alias.txt +++ b/crates/ide-db/src/test_data/test_doc_alias.txt @@ -27,10 +27,12 @@ kind: STRUCT, range: 83..119, }, - name_ptr: SyntaxNodePtr { - kind: NAME, - range: 109..118, - }, + name_ptr: AstPtr( + SyntaxNodePtr { + kind: NAME, + range: 109..118, + }, + ), }, container_name: None, is_alias: false, @@ -54,10 +56,12 @@ kind: STRUCT, range: 0..81, }, - name_ptr: SyntaxNodePtr { - kind: NAME, - range: 74..80, - }, + name_ptr: AstPtr( + SyntaxNodePtr { + kind: NAME, + range: 74..80, + }, + ), }, container_name: None, is_alias: false, @@ -81,10 +85,12 @@ kind: STRUCT, range: 0..81, }, - name_ptr: SyntaxNodePtr { - kind: NAME, - range: 74..80, - }, + name_ptr: AstPtr( + SyntaxNodePtr { + kind: NAME, + range: 74..80, + }, + ), }, container_name: None, is_alias: true, @@ -108,10 +114,12 @@ kind: STRUCT, range: 0..81, }, - name_ptr: SyntaxNodePtr { - kind: NAME, - range: 74..80, - }, + name_ptr: AstPtr( + SyntaxNodePtr { + kind: NAME, + range: 74..80, + }, + ), }, container_name: None, is_alias: true, @@ -135,10 +143,12 @@ kind: STRUCT, range: 0..81, }, - name_ptr: SyntaxNodePtr { - kind: NAME, - range: 74..80, - }, + name_ptr: AstPtr( + SyntaxNodePtr { + kind: NAME, + range: 74..80, + }, + ), }, container_name: None, is_alias: true, @@ -162,10 +172,12 @@ kind: STRUCT, range: 83..119, }, - name_ptr: SyntaxNodePtr { - kind: NAME, - range: 109..118, - }, + name_ptr: AstPtr( + SyntaxNodePtr { + kind: NAME, + range: 109..118, + }, + ), }, container_name: None, is_alias: true, @@ -189,10 +201,12 @@ kind: STRUCT, range: 0..81, }, - name_ptr: SyntaxNodePtr { - kind: NAME, - range: 74..80, - }, + name_ptr: AstPtr( + SyntaxNodePtr { + kind: NAME, + range: 74..80, + }, + ), }, container_name: None, is_alias: true, diff --git a/crates/ide-db/src/test_data/test_symbol_index_collection.txt b/crates/ide-db/src/test_data/test_symbol_index_collection.txt index 375ac5598152f..da1f3167d7d49 100644 --- a/crates/ide-db/src/test_data/test_symbol_index_collection.txt +++ b/crates/ide-db/src/test_data/test_symbol_index_collection.txt @@ -25,10 +25,12 @@ kind: TYPE_ALIAS, range: 397..417, }, - name_ptr: SyntaxNodePtr { - kind: NAME, - range: 402..407, - }, + name_ptr: AstPtr( + SyntaxNodePtr { + kind: NAME, + range: 402..407, + }, + ), }, container_name: None, is_alias: false, @@ -50,10 +52,12 @@ kind: CONST, range: 340..361, }, - name_ptr: SyntaxNodePtr { - kind: NAME, - range: 
346..351, - }, + name_ptr: AstPtr( + SyntaxNodePtr { + kind: NAME, + range: 346..351, + }, + ), }, container_name: None, is_alias: false, @@ -75,10 +79,12 @@ kind: CONST, range: 520..592, }, - name_ptr: SyntaxNodePtr { - kind: NAME, - range: 526..542, - }, + name_ptr: AstPtr( + SyntaxNodePtr { + kind: NAME, + range: 526..542, + }, + ), }, container_name: None, is_alias: false, @@ -102,10 +108,12 @@ kind: ENUM, range: 185..207, }, - name_ptr: SyntaxNodePtr { - kind: NAME, - range: 190..194, - }, + name_ptr: AstPtr( + SyntaxNodePtr { + kind: NAME, + range: 190..194, + }, + ), }, container_name: None, is_alias: false, @@ -129,10 +137,12 @@ kind: USE_TREE, range: 654..676, }, - name_ptr: SyntaxNodePtr { - kind: NAME, - range: 663..676, - }, + name_ptr: AstPtr( + SyntaxNodePtr { + kind: NAME, + range: 663..676, + }, + ), }, container_name: None, is_alias: false, @@ -156,10 +166,12 @@ kind: MACRO_DEF, range: 153..168, }, - name_ptr: SyntaxNodePtr { - kind: NAME, - range: 159..164, - }, + name_ptr: AstPtr( + SyntaxNodePtr { + kind: NAME, + range: 159..164, + }, + ), }, container_name: None, is_alias: false, @@ -181,10 +193,12 @@ kind: STATIC, range: 362..396, }, - name_ptr: SyntaxNodePtr { - kind: NAME, - range: 369..375, - }, + name_ptr: AstPtr( + SyntaxNodePtr { + kind: NAME, + range: 369..375, + }, + ), }, container_name: None, is_alias: false, @@ -208,10 +222,12 @@ kind: STRUCT, range: 170..184, }, - name_ptr: SyntaxNodePtr { - kind: NAME, - range: 177..183, - }, + name_ptr: AstPtr( + SyntaxNodePtr { + kind: NAME, + range: 177..183, + }, + ), }, container_name: None, is_alias: false, @@ -235,10 +251,12 @@ kind: STRUCT, range: 0..22, }, - name_ptr: SyntaxNodePtr { - kind: NAME, - range: 6..21, - }, + name_ptr: AstPtr( + SyntaxNodePtr { + kind: NAME, + range: 6..21, + }, + ), }, container_name: None, is_alias: false, @@ -262,10 +280,12 @@ kind: STRUCT, range: 318..336, }, - name_ptr: SyntaxNodePtr { - kind: NAME, - range: 325..335, - }, + name_ptr: AstPtr( + SyntaxNodePtr { + kind: NAME, + range: 325..335, + }, + ), }, container_name: Some( "main", @@ -291,10 +311,12 @@ kind: STRUCT, range: 555..581, }, - name_ptr: SyntaxNodePtr { - kind: NAME, - range: 562..580, - }, + name_ptr: AstPtr( + SyntaxNodePtr { + kind: NAME, + range: 562..580, + }, + ), }, container_name: Some( "CONST_WITH_INNER", @@ -320,10 +342,12 @@ kind: STRUCT, range: 479..507, }, - name_ptr: SyntaxNodePtr { - kind: NAME, - range: 486..506, - }, + name_ptr: AstPtr( + SyntaxNodePtr { + kind: NAME, + range: 486..506, + }, + ), }, container_name: None, is_alias: false, @@ -345,10 +369,12 @@ kind: TRAIT, range: 261..300, }, - name_ptr: SyntaxNodePtr { - kind: NAME, - range: 267..272, - }, + name_ptr: AstPtr( + SyntaxNodePtr { + kind: NAME, + range: 267..272, + }, + ), }, container_name: None, is_alias: false, @@ -372,10 +398,12 @@ kind: USE_TREE, range: 682..696, }, - name_ptr: SyntaxNodePtr { - kind: NAME, - range: 691..696, - }, + name_ptr: AstPtr( + SyntaxNodePtr { + kind: NAME, + range: 691..696, + }, + ), }, container_name: None, is_alias: false, @@ -399,10 +427,12 @@ kind: UNION, range: 208..222, }, - name_ptr: SyntaxNodePtr { - kind: NAME, - range: 214..219, - }, + name_ptr: AstPtr( + SyntaxNodePtr { + kind: NAME, + range: 214..219, + }, + ), }, container_name: None, is_alias: false, @@ -426,10 +456,12 @@ kind: MODULE, range: 419..457, }, - name_ptr: SyntaxNodePtr { - kind: NAME, - range: 423..428, - }, + name_ptr: AstPtr( + SyntaxNodePtr { + kind: NAME, + range: 423..428, + }, + ), }, container_name: None, is_alias: false, 
@@ -453,10 +485,12 @@ kind: MODULE, range: 594..604, }, - name_ptr: SyntaxNodePtr { - kind: NAME, - range: 598..603, - }, + name_ptr: AstPtr( + SyntaxNodePtr { + kind: NAME, + range: 598..603, + }, + ), }, container_name: None, is_alias: false, @@ -480,10 +514,12 @@ kind: MACRO_RULES, range: 51..131, }, - name_ptr: SyntaxNodePtr { - kind: NAME, - range: 64..77, - }, + name_ptr: AstPtr( + SyntaxNodePtr { + kind: NAME, + range: 64..77, + }, + ), }, container_name: None, is_alias: false, @@ -505,10 +541,12 @@ kind: FN, range: 242..257, }, - name_ptr: SyntaxNodePtr { - kind: NAME, - range: 245..252, - }, + name_ptr: AstPtr( + SyntaxNodePtr { + kind: NAME, + range: 245..252, + }, + ), }, container_name: None, is_alias: false, @@ -532,10 +570,12 @@ kind: MACRO_RULES, range: 1..48, }, - name_ptr: SyntaxNodePtr { - kind: NAME, - range: 14..31, - }, + name_ptr: AstPtr( + SyntaxNodePtr { + kind: NAME, + range: 14..31, + }, + ), }, container_name: None, is_alias: false, @@ -557,10 +597,12 @@ kind: FN, range: 302..338, }, - name_ptr: SyntaxNodePtr { - kind: NAME, - range: 305..309, - }, + name_ptr: AstPtr( + SyntaxNodePtr { + kind: NAME, + range: 305..309, + }, + ), }, container_name: None, is_alias: false, @@ -584,10 +626,12 @@ kind: USE_TREE, range: 611..648, }, - name_ptr: SyntaxNodePtr { - kind: NAME, - range: 628..648, - }, + name_ptr: AstPtr( + SyntaxNodePtr { + kind: NAME, + range: 628..648, + }, + ), }, container_name: None, is_alias: false, @@ -609,10 +653,12 @@ kind: FN, range: 279..298, }, - name_ptr: SyntaxNodePtr { - kind: NAME, - range: 282..290, - }, + name_ptr: AstPtr( + SyntaxNodePtr { + kind: NAME, + range: 282..290, + }, + ), }, container_name: Some( "Trait", @@ -649,10 +695,12 @@ kind: STRUCT, range: 435..455, }, - name_ptr: SyntaxNodePtr { - kind: NAME, - range: 442..454, - }, + name_ptr: AstPtr( + SyntaxNodePtr { + kind: NAME, + range: 442..454, + }, + ), }, container_name: None, is_alias: false, @@ -687,10 +735,12 @@ kind: USE_TREE, range: 111..143, }, - name_ptr: SyntaxNodePtr { - kind: NAME, - range: 127..143, - }, + name_ptr: AstPtr( + SyntaxNodePtr { + kind: NAME, + range: 127..143, + }, + ), }, container_name: None, is_alias: false, @@ -714,10 +764,12 @@ kind: STRUCT, range: 0..20, }, - name_ptr: SyntaxNodePtr { - kind: NAME, - range: 7..19, - }, + name_ptr: AstPtr( + SyntaxNodePtr { + kind: NAME, + range: 7..19, + }, + ), }, container_name: None, is_alias: false, @@ -741,10 +793,12 @@ kind: USE_TREE, range: 25..59, }, - name_ptr: SyntaxNodePtr { - kind: NAME, - range: 41..59, - }, + name_ptr: AstPtr( + SyntaxNodePtr { + kind: NAME, + range: 41..59, + }, + ), }, container_name: None, is_alias: false, @@ -768,10 +822,12 @@ kind: USE_TREE, range: 65..105, }, - name_ptr: SyntaxNodePtr { - kind: NAME, - range: 95..105, - }, + name_ptr: AstPtr( + SyntaxNodePtr { + kind: NAME, + range: 95..105, + }, + ), }, container_name: None, is_alias: false, @@ -795,10 +851,12 @@ kind: USE_TREE, range: 65..105, }, - name_ptr: SyntaxNodePtr { - kind: NAME, - range: 95..105, - }, + name_ptr: AstPtr( + SyntaxNodePtr { + kind: NAME, + range: 95..105, + }, + ), }, container_name: None, is_alias: false, diff --git a/crates/ide/Cargo.toml b/crates/ide/Cargo.toml index d5c3439f95638..0943574ec1b5b 100644 --- a/crates/ide/Cargo.toml +++ b/crates/ide/Cargo.toml @@ -14,6 +14,7 @@ doctest = false [dependencies] cov-mark = "2.0.0-pre.1" crossbeam-channel = "0.5.5" +arrayvec = "0.7.4" either.workspace = true itertools.workspace = true tracing.workspace = true diff --git a/crates/ide/src/call_hierarchy.rs 
b/crates/ide/src/call_hierarchy.rs index 5cc64e60ed852..458b852e2a1e7 100644 --- a/crates/ide/src/call_hierarchy.rs +++ b/crates/ide/src/call_hierarchy.rs @@ -1,5 +1,7 @@ //! Entry point for call-hierarchy +use std::iter; + use hir::{DescendPreference, Semantics}; use ide_db::{ defs::{Definition, NameClass, NameRefClass}, @@ -66,7 +68,10 @@ pub(crate) fn incoming_calls( def.try_to_nav(sema.db) }); if let Some(nav) = nav { - calls.add(nav, sema.original_range(name.syntax()).range); + calls.add(nav.call_site, sema.original_range(name.syntax()).range); + if let Some(other) = nav.def_site { + calls.add(other, sema.original_range(name.syntax()).range); + } } } } @@ -117,8 +122,9 @@ pub(crate) fn outgoing_calls( function.try_to_nav(db).zip(Some(range)) } }?; - Some((nav_target, range)) + Some(nav_target.into_iter().zip(iter::repeat(range))) }) + .flatten() .for_each(|(nav, range)| calls.add(nav, range)); Some(calls.into_items()) diff --git a/crates/ide/src/doc_links/tests.rs b/crates/ide/src/doc_links/tests.rs index 9ae70ae66f504..f388aea4c379b 100644 --- a/crates/ide/src/doc_links/tests.rs +++ b/crates/ide/src/doc_links/tests.rs @@ -1,4 +1,4 @@ -use std::ffi::OsStr; +use std::{ffi::OsStr, iter}; use expect_test::{expect, Expect}; use hir::Semantics; @@ -63,10 +63,12 @@ fn check_doc_links(ra_fixture: &str) { let defs = extract_definitions_from_docs(&docs); let actual: Vec<_> = defs .into_iter() - .map(|(_, link, ns)| { + .flat_map(|(_, link, ns)| { let def = resolve_doc_path_for_def(sema.db, cursor_def, &link, ns) .unwrap_or_else(|| panic!("Failed to resolve {link}")); - let nav_target = def.try_to_nav(sema.db).unwrap(); + def.try_to_nav(sema.db).unwrap().into_iter().zip(iter::repeat(link)) + }) + .map(|(nav_target, link)| { let range = FileRange { file_id: nav_target.file_id, range: nav_target.focus_or_full_range() }; (range, link) diff --git a/crates/ide/src/goto_declaration.rs b/crates/ide/src/goto_declaration.rs index ad7ec1964567b..fae1007435432 100644 --- a/crates/ide/src/goto_declaration.rs +++ b/crates/ide/src/goto_declaration.rs @@ -66,6 +66,7 @@ pub(crate) fn goto_declaration( let item = trait_.items(db).into_iter().find(|it| it.name(db) == name)?; item.try_to_nav(db) }) + .flatten() .collect(); if info.is_empty() { diff --git a/crates/ide/src/goto_definition.rs b/crates/ide/src/goto_definition.rs index 5ca82a362f53e..7491879a67fb4 100644 --- a/crates/ide/src/goto_definition.rs +++ b/crates/ide/src/goto_definition.rs @@ -52,7 +52,7 @@ pub(crate) fn goto_definition( if let Some(doc_comment) = token_as_doc_comment(&original_token) { return doc_comment.get_definition_with_descend_at(sema, offset, |def, _, link_range| { let nav = def.try_to_nav(db)?; - Some(RangeInfo::new(link_range, vec![nav])) + Some(RangeInfo::new(link_range, nav.collect())) }); } @@ -88,6 +88,7 @@ pub(crate) fn goto_definition( .resolved_crate(db) .map(|it| it.root_module().to_nav(sema.db)) .into_iter() + .flatten() .collect(); } try_filter_trait_item_definition(sema, &def) @@ -138,6 +139,7 @@ fn try_lookup_include_path( docs: None, }) } + /// finds the trait definition of an impl'd item, except function /// e.g. /// ```rust @@ -166,13 +168,13 @@ fn try_filter_trait_item_definition( .iter() .filter(|itm| discriminant(*itm) == discri_value) .find_map(|itm| (itm.name(db)? 
== name).then(|| itm.try_to_nav(db)).flatten()) - .map(|it| vec![it]) + .map(|it| it.collect()) } } } fn def_to_nav(db: &RootDatabase, def: Definition) -> Vec { - def.try_to_nav(db).map(|it| vec![it]).unwrap_or_default() + def.try_to_nav(db).map(|it| it.collect()).unwrap_or_default() } #[cfg(test)] @@ -405,8 +407,6 @@ fn bar() { ); } - // FIXME: We should emit two targets here, one for the identifier in the declaration, one for - // the macro call #[test] fn goto_def_for_macro_defined_fn_no_arg() { check( @@ -414,7 +414,7 @@ fn bar() { //- /lib.rs macro_rules! define_fn { () => (fn foo() {}) - + //^^^ } define_fn!(); @@ -1748,9 +1748,9 @@ macro_rules! foo { fn $ident(Foo { $ident }: Foo) {} } } -foo!(foo$0); - //^^^ - //^^^ + foo!(foo$0); + //^^^ + //^^^ "#, ); check( diff --git a/crates/ide/src/goto_implementation.rs b/crates/ide/src/goto_implementation.rs index bb474282dd7c8..6384db39d7c62 100644 --- a/crates/ide/src/goto_implementation.rs +++ b/crates/ide/src/goto_implementation.rs @@ -82,7 +82,11 @@ pub(crate) fn goto_implementation( } fn impls_for_ty(sema: &Semantics<'_, RootDatabase>, ty: hir::Type) -> Vec { - Impl::all_for_type(sema.db, ty).into_iter().filter_map(|imp| imp.try_to_nav(sema.db)).collect() + Impl::all_for_type(sema.db, ty) + .into_iter() + .filter_map(|imp| imp.try_to_nav(sema.db)) + .flatten() + .collect() } fn impls_for_trait( @@ -92,6 +96,7 @@ fn impls_for_trait( Impl::all_for_trait(sema.db, trait_) .into_iter() .filter_map(|imp| imp.try_to_nav(sema.db)) + .flatten() .collect() } @@ -109,6 +114,7 @@ fn impls_for_trait_item( })?; item.try_to_nav(sema.db) }) + .flatten() .collect() } diff --git a/crates/ide/src/goto_type_definition.rs b/crates/ide/src/goto_type_definition.rs index 83f134aaaf576..ad393d98001b2 100644 --- a/crates/ide/src/goto_type_definition.rs +++ b/crates/ide/src/goto_type_definition.rs @@ -31,9 +31,11 @@ pub(crate) fn goto_type_definition( let mut res = Vec::new(); let mut push = |def: Definition| { - if let Some(nav) = def.try_to_nav(db) { - if !res.contains(&nav) { - res.push(nav); + if let Some(navs) = def.try_to_nav(db) { + for nav in navs { + if !res.contains(&nav) { + res.push(nav); + } } } }; diff --git a/crates/ide/src/highlight_related.rs b/crates/ide/src/highlight_related.rs index 8daff8c2ebe3a..3aed007f3ea55 100644 --- a/crates/ide/src/highlight_related.rs +++ b/crates/ide/src/highlight_related.rs @@ -117,7 +117,7 @@ fn highlight_closure_captures( local .sources(sema.db) .into_iter() - .map(|x| x.to_nav(sema.db)) + .flat_map(|x| x.to_nav(sema.db)) .filter(|decl| decl.file_id == file_id) .filter_map(|decl| decl.focus_range) .map(move |range| HighlightedRange { range, category }) @@ -216,7 +216,7 @@ fn highlight_references( local .sources(sema.db) .into_iter() - .map(|x| x.to_nav(sema.db)) + .flat_map(|x| x.to_nav(sema.db)) .filter(|decl| decl.file_id == file_id) .filter_map(|decl| decl.focus_range) .map(|range| HighlightedRange { range, category }) @@ -225,21 +225,27 @@ fn highlight_references( }); } def => { - let hl_range = match def { + let navs = match def { Definition::Module(module) => { - Some(NavigationTarget::from_module_to_decl(sema.db, module)) + NavigationTarget::from_module_to_decl(sema.db, module) + } + def => match def.try_to_nav(sema.db) { + Some(it) => it, + None => continue, + }, + }; + for nav in navs { + if nav.file_id != file_id { + continue; + } + let hl_range = nav.focus_range.map(|range| { + let category = references::decl_mutability(&def, node, range) + .then_some(ReferenceCategory::Write); + HighlightedRange { 
range, category } + }); + if let Some(hl_range) = hl_range { + res.insert(hl_range); } - def => def.try_to_nav(sema.db), - } - .filter(|decl| decl.file_id == file_id) - .and_then(|decl| decl.focus_range) - .map(|range| { - let category = references::decl_mutability(&def, node, range) - .then_some(ReferenceCategory::Write); - HighlightedRange { range, category } - }); - if let Some(hl_range) = hl_range { - res.insert(hl_range); } } } diff --git a/crates/ide/src/hover.rs b/crates/ide/src/hover.rs index 88a5b623425d1..5ad119ace89db 100644 --- a/crates/ide/src/hover.rs +++ b/crates/ide/src/hover.rs @@ -21,6 +21,7 @@ use crate::{ doc_links::token_as_doc_comment, markdown_remove::remove_markdown, markup::Markup, + navigation_target::UpmappingResult, runnables::{runnable_fn, runnable_mod}, FileId, FilePosition, NavigationTarget, RangeInfo, Runnable, TryToNav, }; @@ -73,7 +74,7 @@ impl HoverAction { it.module(db)?, it.name(db).map(|name| name.display(db).to_string()), ), - nav: it.try_to_nav(db)?, + nav: it.try_to_nav(db)?.call_site(), }) }) .collect(); @@ -342,22 +343,26 @@ fn show_implementations_action(db: &RootDatabase, def: Definition) -> Option return it.try_to_nav(db).map(to_action), + Definition::Trait(it) => { + return it.try_to_nav(db).map(UpmappingResult::call_site).map(to_action) + } Definition::Adt(it) => Some(it), Definition::SelfType(it) => it.self_ty(db).as_adt(), _ => None, }?; - adt.try_to_nav(db).map(to_action) + adt.try_to_nav(db).map(UpmappingResult::call_site).map(to_action) } fn show_fn_references_action(db: &RootDatabase, def: Definition) -> Option { match def { - Definition::Function(it) => it.try_to_nav(db).map(|nav_target| { - HoverAction::Reference(FilePosition { - file_id: nav_target.file_id, - offset: nav_target.focus_or_full_range().start(), + Definition::Function(it) => { + it.try_to_nav(db).map(UpmappingResult::call_site).map(|nav_target| { + HoverAction::Reference(FilePosition { + file_id: nav_target.file_id, + offset: nav_target.focus_or_full_range().start(), + }) }) - }), + } _ => None, } } diff --git a/crates/ide/src/inlay_hints.rs b/crates/ide/src/inlay_hints.rs index 7ea9d4f1038b4..ca334e9157977 100644 --- a/crates/ide/src/inlay_hints.rs +++ b/crates/ide/src/inlay_hints.rs @@ -315,6 +315,7 @@ impl HirWrite for InlayHintLabelBuilder<'_> { } self.make_new_part(); let Some(location) = ModuleDef::from(def).try_to_nav(self.db) else { return }; + let location = location.call_site(); let location = FileRange { file_id: location.file_id, range: location.focus_or_full_range() }; self.location = Some(location); diff --git a/crates/ide/src/lib.rs b/crates/ide/src/lib.rs index 3390331e0e8a9..d8f6e4e1b1b15 100644 --- a/crates/ide/src/lib.rs +++ b/crates/ide/src/lib.rs @@ -100,7 +100,7 @@ pub use crate::{ markup::Markup, moniker::{MonikerDescriptorKind, MonikerKind, MonikerResult, PackageInformation}, move_item::Direction, - navigation_target::NavigationTarget, + navigation_target::{NavigationTarget, UpmappingResult}, prime_caches::ParallelPrimeCachesProgress, references::ReferenceSearchResult, rename::RenameError, @@ -230,7 +230,7 @@ impl Analysis { // `AnalysisHost` for creating a fully-featured analysis. 
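Most hunks in this patch follow one shape change: `try_to_nav`/`to_nav` no longer return a single `NavigationTarget` but an `UpmappingResult` carrying a call-site target plus an optional def-site target (the type is defined later in this patch, in navigation_target.rs). A reduced sketch of that type, showing why single-target callers above switch to `.call_site()` while multi-target callers become `.flat_map(...)`/`.flatten()`; field names match the diff, everything else is a stand-in:

```rust
// Reduced sketch of the patch's central type.
struct UpmappingResult<T> {
    call_site: T,        // always present
    def_site: Option<T>, // only when the node came out of a macro definition
}

impl<T> UpmappingResult<T> {
    // single-target callers (hover, runnables, inlay hints, ...) keep their
    // old behaviour by picking the call site
    fn call_site(self) -> T {
        self.call_site
    }
}

impl<T> IntoIterator for UpmappingResult<T> {
    type Item = T;
    type IntoIter = std::iter::Chain<std::option::IntoIter<T>, std::iter::Once<T>>;

    // def site first, then call site, matching the diff's `chain` order
    // (the real impl collects into an ArrayVec first)
    fn into_iter(self) -> Self::IntoIter {
        self.def_site.into_iter().chain(std::iter::once(self.call_site))
    }
}

fn main() {
    let nav = UpmappingResult { call_site: "call", def_site: Some("def") };
    // multi-target callers (goto_*, highlight_related, ...) flatten instead of map
    let all: Vec<_> = nav.into_iter().collect();
    assert_eq!(all, ["def", "call"]);
}
```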
pub fn from_single_file(text: String) -> (Analysis, FileId) { let mut host = AnalysisHost::default(); - let file_id = FileId(0); + let file_id = FileId::from_raw(0); let mut file_set = FileSet::default(); file_set.insert(file_id, VfsPath::new_virtual_path("/main.rs".to_string())); let source_root = SourceRoot::new_local(file_set); @@ -413,6 +413,7 @@ impl Analysis { symbol_index::world_symbols(db, query) .into_iter() // xx: should we make this a par iter? .filter_map(|s| s.try_to_nav(db)) + .map(UpmappingResult::call_site) .collect::>() }) } diff --git a/crates/ide/src/navigation_target.rs b/crates/ide/src/navigation_target.rs index df0c4a6adee26..31f4aad41e521 100644 --- a/crates/ide/src/navigation_target.rs +++ b/crates/ide/src/navigation_target.rs @@ -2,6 +2,7 @@ use std::fmt; +use arrayvec::ArrayVec; use either::Either; use hir::{ db::ExpandDatabase, symbols::FileSymbol, AssocItem, FieldSource, HasContainer, HasSource, @@ -72,15 +73,15 @@ impl fmt::Debug for NavigationTarget { } pub(crate) trait ToNav { - fn to_nav(&self, db: &RootDatabase) -> NavigationTarget; + fn to_nav(&self, db: &RootDatabase) -> UpmappingResult; } pub(crate) trait TryToNav { - fn try_to_nav(&self, db: &RootDatabase) -> Option; + fn try_to_nav(&self, db: &RootDatabase) -> Option>; } impl TryToNav for Either { - fn try_to_nav(&self, db: &RootDatabase) -> Option { + fn try_to_nav(&self, db: &RootDatabase) -> Option> { match self { Either::Left(it) => it.try_to_nav(db), Either::Right(it) => it.try_to_nav(db), @@ -93,23 +94,30 @@ impl NavigationTarget { self.focus_range.unwrap_or(self.full_range) } - pub(crate) fn from_module_to_decl(db: &RootDatabase, module: hir::Module) -> NavigationTarget { + pub(crate) fn from_module_to_decl( + db: &RootDatabase, + module: hir::Module, + ) -> UpmappingResult { let name = module.name(db).map(|it| it.to_smol_str()).unwrap_or_default(); - if let Some(InFile { value, file_id }) = &module.declaration_source(db) { - let (file_id, full_range, focus_range) = - orig_range_with_focus(db, *file_id, value.syntax(), value.name()); - let mut res = NavigationTarget::from_syntax( - file_id, - name, - focus_range, - full_range, - SymbolKind::Module, - ); - res.docs = module.docs(db); - res.description = Some(module.display(db).to_string()); - return res; + match module.declaration_source(db) { + Some(InFile { value, file_id }) => { + orig_range_with_focus(db, file_id, value.syntax(), value.name()).map( + |(FileRange { file_id, range: full_range }, focus_range)| { + let mut res = NavigationTarget::from_syntax( + file_id, + name.clone(), + focus_range, + full_range, + SymbolKind::Module, + ); + res.docs = module.docs(db); + res.description = Some(module.display(db).to_string()); + res + }, + ) + } + _ => module.to_nav(db), } - module.to_nav(db) } #[cfg(test)] @@ -135,13 +143,14 @@ impl NavigationTarget { db: &RootDatabase, InFile { file_id, value }: InFile<&dyn ast::HasName>, kind: SymbolKind, - ) -> NavigationTarget { - let name = value.name().map(|it| it.text().into()).unwrap_or_else(|| "_".into()); - - let (file_id, full_range, focus_range) = - orig_range_with_focus(db, file_id, value.syntax(), value.name()); + ) -> UpmappingResult { + let name: SmolStr = value.name().map(|it| it.text().into()).unwrap_or_else(|| "_".into()); - NavigationTarget::from_syntax(file_id, name, focus_range, full_range, kind) + orig_range_with_focus(db, file_id, value.syntax(), value.name()).map( + |(FileRange { file_id, range: full_range }, focus_range)| { + NavigationTarget::from_syntax(file_id, name.clone(), 
focus_range, full_range, kind) + }, + ) } fn from_syntax( @@ -166,49 +175,51 @@ impl NavigationTarget { } impl TryToNav for FileSymbol { - fn try_to_nav(&self, db: &RootDatabase) -> Option { - let full_range = self.loc.original_range(db); - let focus_range = self.loc.original_name_range(db); - let focus_range = if focus_range.file_id == full_range.file_id - && full_range.range.contains_range(focus_range.range) - { - Some(focus_range.range) - } else { - None - }; - - Some(NavigationTarget { - file_id: full_range.file_id, - name: self - .is_alias - .then(|| self.def.name(db)) - .flatten() - .map_or_else(|| self.name.clone(), |it| it.to_smol_str()), - alias: self.is_alias.then(|| self.name.clone()), - kind: Some(hir::ModuleDefId::from(self.def).into()), - full_range: full_range.range, - focus_range, - container_name: self.container_name.clone(), - description: match self.def { - hir::ModuleDef::Module(it) => Some(it.display(db).to_string()), - hir::ModuleDef::Function(it) => Some(it.display(db).to_string()), - hir::ModuleDef::Adt(it) => Some(it.display(db).to_string()), - hir::ModuleDef::Variant(it) => Some(it.display(db).to_string()), - hir::ModuleDef::Const(it) => Some(it.display(db).to_string()), - hir::ModuleDef::Static(it) => Some(it.display(db).to_string()), - hir::ModuleDef::Trait(it) => Some(it.display(db).to_string()), - hir::ModuleDef::TraitAlias(it) => Some(it.display(db).to_string()), - hir::ModuleDef::TypeAlias(it) => Some(it.display(db).to_string()), - hir::ModuleDef::Macro(it) => Some(it.display(db).to_string()), - hir::ModuleDef::BuiltinType(_) => None, - }, - docs: None, - }) + fn try_to_nav(&self, db: &RootDatabase) -> Option> { + let root = db.parse_or_expand(self.loc.hir_file_id); + self.loc.ptr.to_node(&root); + Some( + orig_range_with_focus( + db, + self.loc.hir_file_id, + &self.loc.ptr.to_node(&root), + Some(self.loc.name_ptr.to_node(&root)), + ) + .map(|(FileRange { file_id, range: full_range }, focus_range)| { + NavigationTarget { + file_id, + name: self + .is_alias + .then(|| self.def.name(db)) + .flatten() + .map_or_else(|| self.name.clone(), |it| it.to_smol_str()), + alias: self.is_alias.then(|| self.name.clone()), + kind: Some(hir::ModuleDefId::from(self.def).into()), + full_range, + focus_range, + container_name: self.container_name.clone(), + description: match self.def { + hir::ModuleDef::Module(it) => Some(it.display(db).to_string()), + hir::ModuleDef::Function(it) => Some(it.display(db).to_string()), + hir::ModuleDef::Adt(it) => Some(it.display(db).to_string()), + hir::ModuleDef::Variant(it) => Some(it.display(db).to_string()), + hir::ModuleDef::Const(it) => Some(it.display(db).to_string()), + hir::ModuleDef::Static(it) => Some(it.display(db).to_string()), + hir::ModuleDef::Trait(it) => Some(it.display(db).to_string()), + hir::ModuleDef::TraitAlias(it) => Some(it.display(db).to_string()), + hir::ModuleDef::TypeAlias(it) => Some(it.display(db).to_string()), + hir::ModuleDef::Macro(it) => Some(it.display(db).to_string()), + hir::ModuleDef::BuiltinType(_) => None, + }, + docs: None, + } + }), + ) } } impl TryToNav for Definition { - fn try_to_nav(&self, db: &RootDatabase) -> Option { + fn try_to_nav(&self, db: &RootDatabase) -> Option> { match self { Definition::Local(it) => Some(it.to_nav(db)), Definition::Label(it) => Some(it.to_nav(db)), @@ -236,7 +247,7 @@ impl TryToNav for Definition { } impl TryToNav for hir::ModuleDef { - fn try_to_nav(&self, db: &RootDatabase) -> Option { + fn try_to_nav(&self, db: &RootDatabase) -> Option> { match self { 
hir::ModuleDef::Module(it) => Some(it.to_nav(db)), hir::ModuleDef::Function(it) => it.try_to_nav(db), @@ -334,22 +345,26 @@ where D: HasSource + ToNavFromAst + Copy + HasDocs + HirDisplay, D::Ast: ast::HasName, { - fn try_to_nav(&self, db: &RootDatabase) -> Option { + fn try_to_nav(&self, db: &RootDatabase) -> Option> { let src = self.source(db)?; - let mut res = NavigationTarget::from_named( - db, - src.as_ref().map(|it| it as &dyn ast::HasName), - D::KIND, - ); - res.docs = self.docs(db); - res.description = Some(self.display(db).to_string()); - res.container_name = self.container_name(db); - Some(res) + Some( + NavigationTarget::from_named( + db, + src.as_ref().map(|it| it as &dyn ast::HasName), + D::KIND, + ) + .map(|mut res| { + res.docs = self.docs(db); + res.description = Some(self.display(db).to_string()); + res.container_name = self.container_name(db); + res + }), + ) } } impl ToNav for hir::Module { - fn to_nav(&self, db: &RootDatabase) -> NavigationTarget { + fn to_nav(&self, db: &RootDatabase) -> UpmappingResult { let InFile { file_id, value } = self.definition_source(db); let name = self.name(db).map(|it| it.to_smol_str()).unwrap_or_default(); @@ -358,13 +373,23 @@ impl ToNav for hir::Module { ModuleSource::Module(node) => (node.syntax(), node.name()), ModuleSource::BlockExpr(node) => (node.syntax(), None), }; - let (file_id, full_range, focus_range) = orig_range_with_focus(db, file_id, syntax, focus); - NavigationTarget::from_syntax(file_id, name, focus_range, full_range, SymbolKind::Module) + + orig_range_with_focus(db, file_id, syntax, focus).map( + |(FileRange { file_id, range: full_range }, focus_range)| { + NavigationTarget::from_syntax( + file_id, + name.clone(), + focus_range, + full_range, + SymbolKind::Module, + ) + }, + ) } } impl TryToNav for hir::Impl { - fn try_to_nav(&self, db: &RootDatabase) -> Option { + fn try_to_nav(&self, db: &RootDatabase) -> Option> { let InFile { file_id, value } = self.source(db)?; let derive_path = self.as_builtin_derive_path(db); @@ -373,82 +398,100 @@ impl TryToNav for hir::Impl { None => (file_id, value.self_ty(), value.syntax()), }; - let (file_id, full_range, focus_range) = orig_range_with_focus(db, file_id, syntax, focus); - Some(NavigationTarget::from_syntax( - file_id, - "impl".into(), - focus_range, - full_range, - SymbolKind::Impl, + Some(orig_range_with_focus(db, file_id, syntax, focus).map( + |(FileRange { file_id, range: full_range }, focus_range)| { + NavigationTarget::from_syntax( + file_id, + "impl".into(), + focus_range, + full_range, + SymbolKind::Impl, + ) + }, )) } } impl TryToNav for hir::ExternCrateDecl { - fn try_to_nav(&self, db: &RootDatabase) -> Option { + fn try_to_nav(&self, db: &RootDatabase) -> Option> { let src = self.source(db)?; let InFile { file_id, value } = src; let focus = value .rename() .map_or_else(|| value.name_ref().map(Either::Left), |it| it.name().map(Either::Right)); - let (file_id, full_range, focus_range) = - orig_range_with_focus(db, file_id, value.syntax(), focus); - let mut res = NavigationTarget::from_syntax( - file_id, - self.alias_or_name(db).unwrap_or_else(|| self.name(db)).to_smol_str(), - focus_range, - full_range, - SymbolKind::Module, - ); - res.docs = self.docs(db); - res.description = Some(self.display(db).to_string()); - res.container_name = container_name(db, *self); - Some(res) + Some(orig_range_with_focus(db, file_id, value.syntax(), focus).map( + |(FileRange { file_id, range: full_range }, focus_range)| { + let mut res = NavigationTarget::from_syntax( + file_id, + 
self.alias_or_name(db).unwrap_or_else(|| self.name(db)).to_smol_str(), + focus_range, + full_range, + SymbolKind::Module, + ); + + res.docs = self.docs(db); + res.description = Some(self.display(db).to_string()); + res.container_name = container_name(db, *self); + res + }, + )) } } impl TryToNav for hir::Field { - fn try_to_nav(&self, db: &RootDatabase) -> Option { + fn try_to_nav(&self, db: &RootDatabase) -> Option> { let src = self.source(db)?; let field_source = match &src.value { FieldSource::Named(it) => { - let mut res = - NavigationTarget::from_named(db, src.with_value(it), SymbolKind::Field); - res.docs = self.docs(db); - res.description = Some(self.display(db).to_string()); - res - } - FieldSource::Pos(it) => { - let FileRange { file_id, range } = - src.with_value(it.syntax()).original_file_range(db); - NavigationTarget::from_syntax(file_id, "".into(), None, range, SymbolKind::Field) + NavigationTarget::from_named(db, src.with_value(it), SymbolKind::Field).map( + |mut res| { + res.docs = self.docs(db); + res.description = Some(self.display(db).to_string()); + res + }, + ) } + FieldSource::Pos(it) => orig_range(db, src.file_id, it.syntax()).map( + |(FileRange { file_id, range: full_range }, focus_range)| { + NavigationTarget::from_syntax( + file_id, + format!("{}", self.index()).into(), + focus_range, + full_range, + SymbolKind::Field, + ) + }, + ), }; Some(field_source) } } impl TryToNav for hir::Macro { - fn try_to_nav(&self, db: &RootDatabase) -> Option { + fn try_to_nav(&self, db: &RootDatabase) -> Option> { let src = self.source(db)?; let name_owner: &dyn ast::HasName = match &src.value { Either::Left(it) => it, Either::Right(it) => it, }; - let mut res = NavigationTarget::from_named( - db, - src.as_ref().with_value(name_owner), - self.kind(db).into(), - ); - res.docs = self.docs(db); - Some(res) + Some( + NavigationTarget::from_named( + db, + src.as_ref().with_value(name_owner), + self.kind(db).into(), + ) + .map(|mut res| { + res.docs = self.docs(db); + res + }), + ) } } impl TryToNav for hir::Adt { - fn try_to_nav(&self, db: &RootDatabase) -> Option { + fn try_to_nav(&self, db: &RootDatabase) -> Option> { match self { hir::Adt::Struct(it) => it.try_to_nav(db), hir::Adt::Union(it) => it.try_to_nav(db), @@ -458,7 +501,7 @@ impl TryToNav for hir::Adt { } impl TryToNav for hir::AssocItem { - fn try_to_nav(&self, db: &RootDatabase) -> Option { + fn try_to_nav(&self, db: &RootDatabase) -> Option> { match self { AssocItem::Function(it) => it.try_to_nav(db), AssocItem::Const(it) => it.try_to_nav(db), @@ -468,7 +511,7 @@ impl TryToNav for hir::AssocItem { } impl TryToNav for hir::GenericParam { - fn try_to_nav(&self, db: &RootDatabase) -> Option { + fn try_to_nav(&self, db: &RootDatabase) -> Option> { match self { hir::GenericParam::TypeParam(it) => it.try_to_nav(db), hir::GenericParam::ConstParam(it) => it.try_to_nav(db), @@ -478,7 +521,7 @@ impl TryToNav for hir::GenericParam { } impl ToNav for LocalSource { - fn to_nav(&self, db: &RootDatabase) -> NavigationTarget { + fn to_nav(&self, db: &RootDatabase) -> UpmappingResult { let InFile { file_id, value } = &self.source; let file_id = *file_id; let local = self.local; @@ -487,60 +530,61 @@ impl ToNav for LocalSource { Either::Right(it) => (it.syntax(), it.name()), }; - let (file_id, full_range, focus_range) = orig_range_with_focus(db, file_id, node, name); - - let name = local.name(db).to_smol_str(); - let kind = if local.is_self(db) { - SymbolKind::SelfParam - } else if local.is_param(db) { - SymbolKind::ValueParam - } else { - 
SymbolKind::Local - }; - NavigationTarget { - file_id, - name, - alias: None, - kind: Some(kind), - full_range, - focus_range, - container_name: None, - description: None, - docs: None, - } + orig_range_with_focus(db, file_id, node, name).map( + |(FileRange { file_id, range: full_range }, focus_range)| { + let name = local.name(db).to_smol_str(); + let kind = if local.is_self(db) { + SymbolKind::SelfParam + } else if local.is_param(db) { + SymbolKind::ValueParam + } else { + SymbolKind::Local + }; + NavigationTarget { + file_id, + name, + alias: None, + kind: Some(kind), + full_range, + focus_range, + container_name: None, + description: None, + docs: None, + } + }, + ) } } impl ToNav for hir::Local { - fn to_nav(&self, db: &RootDatabase) -> NavigationTarget { + fn to_nav(&self, db: &RootDatabase) -> UpmappingResult { self.primary_source(db).to_nav(db) } } impl ToNav for hir::Label { - fn to_nav(&self, db: &RootDatabase) -> NavigationTarget { + fn to_nav(&self, db: &RootDatabase) -> UpmappingResult { let InFile { file_id, value } = self.source(db); let name = self.name(db).to_smol_str(); - let (file_id, full_range, focus_range) = - orig_range_with_focus(db, file_id, value.syntax(), value.lifetime()); - - NavigationTarget { - file_id, - name, - alias: None, - kind: Some(SymbolKind::Label), - full_range, - focus_range, - container_name: None, - description: None, - docs: None, - } + orig_range_with_focus(db, file_id, value.syntax(), value.lifetime()).map( + |(FileRange { file_id, range: full_range }, focus_range)| NavigationTarget { + file_id, + name: name.clone(), + alias: None, + kind: Some(SymbolKind::Label), + full_range, + focus_range, + container_name: None, + description: None, + docs: None, + }, + ) } } impl TryToNav for hir::TypeParam { - fn try_to_nav(&self, db: &RootDatabase) -> Option { + fn try_to_nav(&self, db: &RootDatabase) -> Option> { let InFile { file_id, value } = self.merge().source(db)?; let name = self.name(db).to_smol_str(); @@ -559,51 +603,51 @@ impl TryToNav for hir::TypeParam { }; let focus = value.as_ref().either(|it| it.name(), |it| it.name()); - let (file_id, full_range, focus_range) = orig_range_with_focus(db, file_id, syntax, focus); - - Some(NavigationTarget { - file_id, - name, - alias: None, - kind: Some(SymbolKind::TypeParam), - full_range, - focus_range, - container_name: None, - description: None, - docs: None, - }) + Some(orig_range_with_focus(db, file_id, syntax, focus).map( + |(FileRange { file_id, range: full_range }, focus_range)| NavigationTarget { + file_id, + name: name.clone(), + alias: None, + kind: Some(SymbolKind::TypeParam), + full_range, + focus_range, + container_name: None, + description: None, + docs: None, + }, + )) } } impl TryToNav for hir::TypeOrConstParam { - fn try_to_nav(&self, db: &RootDatabase) -> Option { + fn try_to_nav(&self, db: &RootDatabase) -> Option> { self.split(db).try_to_nav(db) } } impl TryToNav for hir::LifetimeParam { - fn try_to_nav(&self, db: &RootDatabase) -> Option { + fn try_to_nav(&self, db: &RootDatabase) -> Option> { let InFile { file_id, value } = self.source(db)?; let name = self.name(db).to_smol_str(); - let FileRange { file_id, range } = - InFile::new(file_id, value.syntax()).original_file_range(db); - Some(NavigationTarget { - file_id, - name, - alias: None, - kind: Some(SymbolKind::LifetimeParam), - full_range: range, - focus_range: Some(range), - container_name: None, - description: None, - docs: None, - }) + Some(orig_range(db, file_id, value.syntax()).map( + |(FileRange { file_id, range: full_range 
}, focus_range)| NavigationTarget { + file_id, + name: name.clone(), + alias: None, + kind: Some(SymbolKind::LifetimeParam), + full_range, + focus_range, + container_name: None, + description: None, + docs: None, + }, + )) } } impl TryToNav for hir::ConstParam { - fn try_to_nav(&self, db: &RootDatabase) -> Option { + fn try_to_nav(&self, db: &RootDatabase) -> Option> { let InFile { file_id, value } = self.merge().source(db)?; let name = self.name(db).to_smol_str(); @@ -615,46 +659,180 @@ impl TryToNav for hir::ConstParam { } }; - let (file_id, full_range, focus_range) = - orig_range_with_focus(db, file_id, value.syntax(), value.name()); - Some(NavigationTarget { - file_id, - name, - alias: None, - kind: Some(SymbolKind::ConstParam), - full_range, - focus_range, - container_name: None, - description: None, - docs: None, - }) + Some(orig_range_with_focus(db, file_id, value.syntax(), value.name()).map( + |(FileRange { file_id, range: full_range }, focus_range)| NavigationTarget { + file_id, + name: name.clone(), + alias: None, + kind: Some(SymbolKind::ConstParam), + full_range, + focus_range, + container_name: None, + description: None, + docs: None, + }, + )) + } +} + +#[derive(Debug)] +pub struct UpmappingResult { + /// The macro call site. + pub call_site: T, + /// The macro definition site, if relevant. + pub def_site: Option, +} + +impl UpmappingResult { + pub fn call_site(self) -> T { + self.call_site + } + + pub fn collect>(self) -> FI { + FI::from_iter(self.into_iter()) + } +} + +impl IntoIterator for UpmappingResult { + type Item = T; + + type IntoIter = as IntoIterator>::IntoIter; + + fn into_iter(self) -> Self::IntoIter { + self.def_site + .into_iter() + .chain(Some(self.call_site)) + .collect::>() + .into_iter() + } +} + +impl UpmappingResult { + fn map(self, f: impl Fn(T) -> U) -> UpmappingResult { + UpmappingResult { call_site: f(self.call_site), def_site: self.def_site.map(f) } } } /// Returns the original range of the syntax node, and the range of the name mapped out of macro expansions -/// Additionally verifies that the name span is in bounds and related to the original range. +/// May return two results if the mapped node originates from a macro definition in which case the +/// second result is the creating macro call. 
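A concrete illustration of the two-result case described in the comment above, modelled on the goto_definition tests earlier in this patch: when a name is produced by a macro definition, navigation now reports the token inside the macro body in addition to the invocation.

```rust
// The `foo` identifier below only exists inside the macro definition, so an
// upmapped navigation target has both a def site (the token in the macro
// body) and a call site (the `define_fn!()` invocation).
macro_rules! define_fn {
    () => {
        fn foo() {} // def site: goto-definition can land on this `foo`
    };
}

define_fn!(); // call site: ...and also on this invocation

fn main() {
    foo(); // resolving `foo` from here yields two targets after this patch
}
```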
 fn orig_range_with_focus(
     db: &RootDatabase,
     hir_file: HirFileId,
     value: &SyntaxNode,
     name: Option<impl AstNode>,
-) -> (FileId, TextRange, Option<TextRange>) {
-    let FileRange { file_id, range } =
-        match InFile::new(hir_file, value).original_file_range_opt(db) {
-            Some((range, ctxt)) if ctxt.is_root() => range,
-            _ => db
-                .lookup_intern_macro_call(hir_file.macro_file().unwrap().macro_call_id)
-                .kind
-                .original_call_range(db),
+) -> UpmappingResult<(FileRange, Option<TextRange>)> {
+    let Some(name) = name else { return orig_range(db, hir_file, value) };
+
+    let call_range = || {
+        db.lookup_intern_macro_call(hir_file.macro_file().unwrap().macro_call_id)
+            .kind
+            .original_call_range(db)
+    };
+
+    let def_range = || {
+        db.lookup_intern_macro_call(hir_file.macro_file().unwrap().macro_call_id)
+            .def
+            .definition_range(db)
+    };
+
+    // FIXME What about include!d things
+
+    let value_range = InFile::new(hir_file, value).original_file_range_opt(db);
+    let ((call_site_range, call_site_focus), def_site) =
+        match InFile::new(hir_file, name.syntax()).original_file_range_opt(db) {
+            // call site name
+            Some((focus_range, ctxt)) if ctxt.is_root() => {
+                // Try to upmap the node as well, if it ends up in the def site, go back to the call site
+                (
+                    (
+                        match value_range {
+                            // name is in the node in the macro input so we can return it
+                            Some((range, ctxt))
+                                if ctxt.is_root()
+                                    && range.file_id == focus_range.file_id
+                                    && range.range.contains_range(focus_range.range) =>
+                            {
+                                range
+                            }
+                            // name lies outside the node, so instead point to the macro call which
+                            // *should* contain the name
+                            _ => call_range(),
+                        },
+                        Some(focus_range),
+                    ),
+                    // no def site relevant
+                    None,
+                )
+            }
+
+            // def site name
+            // FIXME: This can be improved
+            Some((focus_range, _ctxt)) => {
+                match value_range {
+                    // but overall node is in macro input
+                    Some((range, ctxt)) if ctxt.is_root() => (
+                        // node mapped up in call site, show the node
+                        (range, None),
+                        // def site, if the name is in the (possibly) upmapped def site range, show the
+                        // def site
+                        {
+                            let (def_site, _) = def_range().original_node_file_range(db);
+                            (def_site.file_id == focus_range.file_id
+                                && def_site.range.contains_range(focus_range.range))
+                            .then_some((def_site, Some(focus_range)))
+                        },
+                    ),
+                    // node is in macro def, just show the focus
+                    _ => (
+                        // show the macro call
+                        (call_range(), None),
+                        Some((focus_range, Some(focus_range))),
+                    ),
+                }
+            }
+            // lost name?
can't happen for single tokens + None => return orig_range(db, hir_file, value), }; - let focus_range = name - .and_then(|it| InFile::new(hir_file, it.syntax()).original_file_range_opt(db)) - .filter(|(frange, ctxt)| { - ctxt.is_root() && frange.file_id == file_id && frange.range.contains_range(frange.range) - }) - .map(|(frange, _ctxt)| frange.range); - - (file_id, range, focus_range) + + UpmappingResult { + call_site: ( + call_site_range, + call_site_focus.and_then(|FileRange { file_id, range }| { + if call_site_range.file_id == file_id && call_site_range.range.contains_range(range) + { + Some(range) + } else { + None + } + }), + ), + def_site: def_site.map(|(def_site_range, def_site_focus)| { + ( + def_site_range, + def_site_focus.and_then(|FileRange { file_id, range }| { + if def_site_range.file_id == file_id + && def_site_range.range.contains_range(range) + { + Some(range) + } else { + None + } + }), + ) + }), + } +} + +fn orig_range( + db: &RootDatabase, + hir_file: HirFileId, + value: &SyntaxNode, +) -> UpmappingResult<(FileRange, Option)> { + UpmappingResult { + call_site: (InFile::new(hir_file, value).original_file_range(db), None), + def_site: None, + } } #[cfg(test)] diff --git a/crates/ide/src/parent_module.rs b/crates/ide/src/parent_module.rs index 506f9452cf196..413dbf9c5dfc6 100644 --- a/crates/ide/src/parent_module.rs +++ b/crates/ide/src/parent_module.rs @@ -45,11 +45,11 @@ pub(crate) fn parent_module(db: &RootDatabase, position: FilePosition) -> Vec sema .to_def(&module) .into_iter() - .map(|module| NavigationTarget::from_module_to_decl(db, module)) + .flat_map(|module| NavigationTarget::from_module_to_decl(db, module)) .collect(), None => sema .to_module_defs(position.file_id) - .map(|module| NavigationTarget::from_module_to_decl(db, module)) + .flat_map(|module| NavigationTarget::from_module_to_decl(db, module)) .collect(), } } diff --git a/crates/ide/src/references.rs b/crates/ide/src/references.rs index b805ddfa26789..6c0fb0baf2e2b 100644 --- a/crates/ide/src/references.rs +++ b/crates/ide/src/references.rs @@ -9,6 +9,8 @@ //! at the index that the match starts at and its tree parent is //! resolved to the search element definition, we get a reference. 
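The references.rs hunk below reorders the search so the per-file reference map exists before the declaration is computed; when the declaration upmaps to a macro def site, the def site becomes the declaration and the call site is demoted to an ordinary reference. A simplified model of that bookkeeping, with the nav types reduced to plain tuples:

```rust
use std::collections::HashMap;

type FileId = u32;
type TextRange = (u32, u32);

// Simplified: prefer the def-site target as the declaration when there is
// one, and record the call-site target as an extra reference entry.
fn declaration_and_extra_ref(
    references: &mut HashMap<FileId, Vec<TextRange>>,
    call_site: (FileId, TextRange),
    def_site: Option<(FileId, TextRange)>,
) -> (FileId, TextRange) {
    match def_site {
        Some(def) => {
            references.entry(call_site.0).or_default().push(call_site.1);
            def
        }
        None => call_site,
    }
}

fn main() {
    let mut refs: HashMap<FileId, Vec<TextRange>> = HashMap::new();
    let decl = declaration_and_extra_ref(&mut refs, (0, (10, 13)), Some((0, (2, 5))));
    assert_eq!(decl, (0, (2, 5)));
    assert_eq!(refs[&0], [(10, 13)]); // the call site survives as a reference
}
```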
+use std::collections::HashMap; + use hir::{DescendPreference, PathResolution, Semantics}; use ide_db::{ base_db::FileId, @@ -60,19 +62,6 @@ pub(crate) fn find_all_refs( let syntax = sema.parse(position.file_id).syntax().clone(); let make_searcher = |literal_search: bool| { move |def: Definition| { - let declaration = match def { - Definition::Module(module) => { - Some(NavigationTarget::from_module_to_decl(sema.db, module)) - } - def => def.try_to_nav(sema.db), - } - .map(|nav| { - let decl_range = nav.focus_or_full_range(); - Declaration { - is_mut: decl_mutability(&def, sema.parse(nav.file_id).syntax(), decl_range), - nav, - } - }); let mut usages = def.usages(sema).set_scope(search_scope.as_ref()).include_self_refs().all(); @@ -80,7 +69,7 @@ pub(crate) fn find_all_refs( retain_adt_literal_usages(&mut usages, def, sema); } - let references = usages + let mut references = usages .into_iter() .map(|(file_id, refs)| { ( @@ -91,8 +80,30 @@ pub(crate) fn find_all_refs( .collect(), ) }) - .collect(); - + .collect::, _>>(); + let declaration = match def { + Definition::Module(module) => { + Some(NavigationTarget::from_module_to_decl(sema.db, module)) + } + def => def.try_to_nav(sema.db), + } + .map(|nav| { + let (nav, extra_ref) = match nav.def_site { + Some(call) => (call, Some(nav.call_site)), + None => (nav.call_site, None), + }; + if let Some(extra_ref) = extra_ref { + references + .entry(extra_ref.file_id) + .or_default() + .push((extra_ref.focus_or_full_range(), None)); + } + let decl_range = nav.focus_or_full_range(); + Declaration { + is_mut: decl_mutability(&def, sema.parse(nav.file_id).syntax(), decl_range), + nav, + } + }); ReferenceSearchResult { declaration, references } } }; @@ -882,7 +893,7 @@ pub(super) struct Foo$0 { check_with_scope( code, - Some(SearchScope::single_file(FileId(2))), + Some(SearchScope::single_file(FileId::from_raw(2))), expect![[r#" quux Function FileId(0) 19..35 26..30 @@ -1181,7 +1192,7 @@ fn foo<'a, 'b: 'a>(x: &'a$0 ()) -> &'a () where &'a (): Foo<'a> { } "#, expect![[r#" - 'a LifetimeParam FileId(0) 55..57 55..57 + 'a LifetimeParam FileId(0) 55..57 FileId(0) 63..65 FileId(0) 71..73 @@ -1199,7 +1210,7 @@ fn foo<'a, 'b: 'a>(x: &'a$0 ()) -> &'a () where &'a (): Foo<'a> { type Foo<'a, T> where T: 'a$0 = &'a T; "#, expect![[r#" - 'a LifetimeParam FileId(0) 9..11 9..11 + 'a LifetimeParam FileId(0) 9..11 FileId(0) 25..27 FileId(0) 31..33 @@ -1221,7 +1232,7 @@ impl<'a> Foo<'a> for &'a () { } "#, expect![[r#" - 'a LifetimeParam FileId(0) 47..49 47..49 + 'a LifetimeParam FileId(0) 47..49 FileId(0) 55..57 FileId(0) 64..66 diff --git a/crates/ide/src/runnables.rs b/crates/ide/src/runnables.rs index d487a538bb574..c0b556f5441e7 100644 --- a/crates/ide/src/runnables.rs +++ b/crates/ide/src/runnables.rs @@ -335,7 +335,8 @@ pub(crate) fn runnable_fn( sema.db, def.source(sema.db)?.as_ref().map(|it| it as &dyn ast::HasName), SymbolKind::Function, - ); + ) + .call_site(); let cfg = def.attrs(sema.db).cfg(); Some(Runnable { use_name_in_title: false, nav, kind, cfg }) } @@ -357,7 +358,7 @@ pub(crate) fn runnable_mod( let attrs = def.attrs(sema.db); let cfg = attrs.cfg(); - let nav = NavigationTarget::from_module_to_decl(sema.db, def); + let nav = NavigationTarget::from_module_to_decl(sema.db, def).call_site(); Some(Runnable { use_name_in_title: false, nav, kind: RunnableKind::TestMod { path }, cfg }) } @@ -370,7 +371,7 @@ pub(crate) fn runnable_impl( return None; } let cfg = attrs.cfg(); - let nav = def.try_to_nav(sema.db)?; + let nav = def.try_to_nav(sema.db)?.call_site(); 
let ty = def.self_ty(sema.db); let adt_name = ty.as_adt()?.name(sema.db); let mut ty_args = ty.generic_parameters(sema.db).peekable(); @@ -407,7 +408,7 @@ fn runnable_mod_outline_definition( match def.definition_source(sema.db).value { hir::ModuleSource::SourceFile(_) => Some(Runnable { use_name_in_title: false, - nav: def.to_nav(sema.db), + nav: def.to_nav(sema.db).call_site(), kind: RunnableKind::TestMod { path }, cfg, }), @@ -465,7 +466,8 @@ fn module_def_doctest(db: &RootDatabase, def: Definition) -> Option { let mut nav = match def { Definition::Module(def) => NavigationTarget::from_module_to_decl(db, def), def => def.try_to_nav(db)?, - }; + } + .call_site(); nav.focus_range = None; nav.description = None; nav.docs = None; diff --git a/crates/ide/src/static_index.rs b/crates/ide/src/static_index.rs index 49e165d3dc50b..3724dc2822117 100644 --- a/crates/ide/src/static_index.rs +++ b/crates/ide/src/static_index.rs @@ -13,6 +13,7 @@ use ide_db::{ use syntax::{AstNode, SyntaxKind::*, TextRange, T}; use crate::inlay_hints::InlayFieldsToResolve; +use crate::navigation_target::UpmappingResult; use crate::{ hover::hover_for_definition, inlay_hints::AdjustmentHintsMode, @@ -166,9 +167,8 @@ impl StaticIndex<'_> { } else { let it = self.tokens.insert(TokenStaticData { hover: hover_for_definition(&sema, file_id, def, &node, &hover_config), - definition: def.try_to_nav(self.db).map(|it| FileRange { - file_id: it.file_id, - range: it.focus_or_full_range(), + definition: def.try_to_nav(self.db).map(UpmappingResult::call_site).map(|it| { + FileRange { file_id: it.file_id, range: it.focus_or_full_range() } }), references: vec![], moniker: current_crate.and_then(|cc| def_to_moniker(self.db, def, cc)), @@ -179,7 +179,7 @@ impl StaticIndex<'_> { let token = self.tokens.get_mut(id).unwrap(); token.references.push(ReferenceData { range: FileRange { range, file_id }, - is_definition: match def.try_to_nav(self.db) { + is_definition: match def.try_to_nav(self.db).map(UpmappingResult::call_site) { Some(it) => it.file_id == file_id && it.focus_or_full_range() == range, None => false, }, diff --git a/crates/proc-macro-api/src/msg.rs b/crates/proc-macro-api/src/msg.rs index dd882d82fb64a..1d3e45aff385e 100644 --- a/crates/proc-macro-api/src/msg.rs +++ b/crates/proc-macro-api/src/msg.rs @@ -147,8 +147,10 @@ mod tests { use super::*; fn fixture_token_tree() -> Subtree { - let anchor = - SpanAnchor { file_id: FileId(0), ast_id: ErasedFileAstId::from_raw(RawIdx::from(0)) }; + let anchor = SpanAnchor { + file_id: FileId::from_raw(0), + ast_id: ErasedFileAstId::from_raw(RawIdx::from(0)), + }; let mut subtree = Subtree { delimiter: Delimiter { open: SpanData { diff --git a/crates/project-model/src/tests.rs b/crates/project-model/src/tests.rs index 98f3063bb9829..4887b29815ae2 100644 --- a/crates/project-model/src/tests.rs +++ b/crates/project-model/src/tests.rs @@ -58,7 +58,7 @@ fn load_cargo_with_sysroot( &mut { |path| { let len = file_map.len(); - Some(*file_map.entry(path.to_path_buf()).or_insert(FileId(len as u32))) + Some(*file_map.entry(path.to_path_buf()).or_insert(FileId::from_raw(len as u32))) } }, &Default::default(), @@ -142,7 +142,7 @@ fn to_crate_graph(project_workspace: ProjectWorkspace) -> (CrateGraph, ProcMacro let mut counter = 0; move |_path| { counter += 1; - Some(FileId(counter)) + Some(FileId::from_raw(counter)) } }, &Default::default(), diff --git a/crates/rust-analyzer/src/handlers/request.rs b/crates/rust-analyzer/src/handlers/request.rs index 06c27332d4406..49c88702faad0 100644 --- 
a/crates/rust-analyzer/src/handlers/request.rs +++ b/crates/rust-analyzer/src/handlers/request.rs @@ -1438,7 +1438,7 @@ pub(crate) fn handle_inlay_hints_resolve( }; let resolve_data: lsp_ext::InlayHintResolveData = serde_json::from_value(data)?; - let file_id = FileId(resolve_data.file_id); + let file_id = FileId::from_raw(resolve_data.file_id); anyhow::ensure!(snap.file_exists(file_id), "Invalid LSP resolve data"); let line_index = snap.file_line_index(file_id)?; diff --git a/crates/rust-analyzer/src/lsp/to_proto.rs b/crates/rust-analyzer/src/lsp/to_proto.rs index fb366fd5cc41f..dae560c5de12b 100644 --- a/crates/rust-analyzer/src/lsp/to_proto.rs +++ b/crates/rust-analyzer/src/lsp/to_proto.rs @@ -457,7 +457,7 @@ pub(crate) fn inlay_hint( inlay_hint.text_edit.map(|it| text_edit_vec(line_index, it)) }; let data = if needs_resolve && something_to_resolve { - Some(to_value(lsp_ext::InlayHintResolveData { file_id: file_id.0 }).unwrap()) + Some(to_value(lsp_ext::InlayHintResolveData { file_id: file_id.index() }).unwrap()) } else { None }; diff --git a/crates/syntax/src/ptr.rs b/crates/syntax/src/ptr.rs index 71762996cd7d1..07641b203bb86 100644 --- a/crates/syntax/src/ptr.rs +++ b/crates/syntax/src/ptr.rs @@ -22,12 +22,17 @@ use crate::{syntax_node::RustLanguage, AstNode, SyntaxNode}; pub type SyntaxNodePtr = rowan::ast::SyntaxNodePtr; /// Like `SyntaxNodePtr`, but remembers the type of node. -#[derive(Debug)] pub struct AstPtr { raw: SyntaxNodePtr, _ty: PhantomData N>, } +impl std::fmt::Debug for AstPtr { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.debug_tuple("AstPtr").field(&self.raw).finish() + } +} + impl Clone for AstPtr { fn clone(&self) -> AstPtr { AstPtr { raw: self.raw.clone(), _ty: PhantomData } diff --git a/crates/vfs/src/lib.rs b/crates/vfs/src/lib.rs index 7360f68c735e7..8ffda5d78d13e 100644 --- a/crates/vfs/src/lib.rs +++ b/crates/vfs/src/lib.rs @@ -60,11 +60,21 @@ pub use paths::{AbsPath, AbsPathBuf}; /// /// Most functions in rust-analyzer use this when they need to refer to a file. #[derive(Copy, Clone, Debug, Ord, PartialOrd, Eq, PartialEq, Hash)] -pub struct FileId(pub u32); +pub struct FileId(u32); impl FileId { /// Think twice about using this outside of tests. If this ends up in a wrong place it will cause panics! 
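The vfs hunk completed just below makes the `FileId` index private, which is what forced all the earlier call-site changes (`FileId(0)` becoming `FileId::from_raw(0)` in tests, `file_id.0` becoming `file_id.index()` in to_proto). The new surface, reduced to a self-contained form that matches the shape the patch gives `vfs::FileId`:

```rust
// The raw u32 is no longer public, so every construction and extraction of
// a file index is forced through a named accessor and is easy to audit.
#[derive(Copy, Clone, Debug, Ord, PartialOrd, Eq, PartialEq, Hash)]
pub struct FileId(u32);

impl FileId {
    #[inline]
    pub fn from_raw(raw: u32) -> FileId {
        FileId(raw)
    }

    #[inline]
    pub fn index(self) -> u32 {
        self.0
    }
}

fn main() {
    // before this patch: FileId(0) and file_id.0
    let file_id = FileId::from_raw(0);
    assert_eq!(file_id.index(), 0); // round-trips
}
```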
- pub const BOGUS: FileId = FileId(u32::MAX); + pub const BOGUS: FileId = FileId(0xe4e4e); + + #[inline] + pub fn from_raw(raw: u32) -> FileId { + FileId(raw) + } + + #[inline] + pub fn index(self) -> u32 { + self.0 + } } /// safe because `FileId` is a newtype of `u32` From 634d588fd757fab1d2d152bd1e07e747229531d5 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Wed, 6 Dec 2023 14:36:39 +0100 Subject: [PATCH 46/80] Simplify --- .../hir-def/src/macro_expansion_tests/mod.rs | 52 ++++--- crates/hir-def/src/nameres/mod_resolution.rs | 4 +- crates/hir-expand/src/files.rs | 52 +++---- crates/hir-expand/src/lib.rs | 146 +++++++----------- crates/hir-ty/src/diagnostics/decl_check.rs | 4 +- crates/hir/src/lib.rs | 3 +- crates/hir/src/semantics.rs | 6 +- crates/hir/src/source_analyzer.rs | 13 +- crates/hir/src/symbols.rs | 4 - .../replace_derive_with_manual_impl.rs | 10 +- crates/ide/src/expand_macro.rs | 8 +- crates/ide/src/runnables.rs | 2 +- .../ide/src/syntax_highlighting/highlight.rs | 7 +- 13 files changed, 136 insertions(+), 175 deletions(-) diff --git a/crates/hir-def/src/macro_expansion_tests/mod.rs b/crates/hir-def/src/macro_expansion_tests/mod.rs index bcbf4047caa24..be2a503d82b15 100644 --- a/crates/hir-def/src/macro_expansion_tests/mod.rs +++ b/crates/hir-def/src/macro_expansion_tests/mod.rs @@ -18,7 +18,7 @@ use std::{iter, ops::Range, sync}; use base_db::{fixture::WithFixture, span::SpanData, ProcMacro, SourceDatabase}; use expect_test::Expect; -use hir_expand::{db::ExpandDatabase, span::SpanMapRef, HirFileIdExt, InFile, MacroFileId}; +use hir_expand::{db::ExpandDatabase, span::SpanMapRef, InFile, MacroFileId, MacroFileIdExt}; use stdx::format_to; use syntax::{ ast::{self, edit::IndentLevel}, @@ -172,35 +172,41 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream }; if let Some(src) = src { - if src.file_id.is_attr_macro(&db) || src.file_id.is_custom_derive(&db) { - let call = src.file_id.call_node(&db).expect("macro file"); - let mut show_spans = false; - let mut show_ctxt = false; - for comment in call.value.children_with_tokens().filter(|it| it.kind() == COMMENT) { - show_spans |= comment.to_string().contains("+spans"); - show_ctxt |= comment.to_string().contains("+syntaxctxt"); + if let Some(file_id) = src.file_id.macro_file() { + if file_id.is_attr_macro(&db) || file_id.is_custom_derive(&db) { + let call = file_id.call_node(&db); + let mut show_spans = false; + let mut show_ctxt = false; + for comment in + call.value.children_with_tokens().filter(|it| it.kind() == COMMENT) + { + show_spans |= comment.to_string().contains("+spans"); + show_ctxt |= comment.to_string().contains("+syntaxctxt"); + } + let pp = pretty_print_macro_expansion( + src.value, + db.span_map(src.file_id).as_ref(), + show_spans, + show_ctxt, + ); + format_to!(expanded_text, "\n{}", pp) } - let pp = pretty_print_macro_expansion( - src.value, - db.span_map(src.file_id).as_ref(), - show_spans, - show_ctxt, - ); - format_to!(expanded_text, "\n{}", pp) } } } for impl_id in def_map[local_id].scope.impls() { let src = impl_id.lookup(&db).source(&db); - if src.file_id.is_builtin_derive(&db) { - let pp = pretty_print_macro_expansion( - src.value.syntax().clone(), - db.span_map(src.file_id).as_ref(), - false, - false, - ); - format_to!(expanded_text, "\n{}", pp) + if let Some(macro_file) = src.file_id.macro_file() { + if macro_file.is_builtin_derive(&db) { + let pp = pretty_print_macro_expansion( + src.value.syntax().clone(), + db.span_map(macro_file.into()).as_ref(), + false, + false, + ); 
+ format_to!(expanded_text, "\n{}", pp) + } } } diff --git a/crates/hir-def/src/nameres/mod_resolution.rs b/crates/hir-def/src/nameres/mod_resolution.rs index 9dcb9717297fa..c45200e2de9df 100644 --- a/crates/hir-def/src/nameres/mod_resolution.rs +++ b/crates/hir-def/src/nameres/mod_resolution.rs @@ -1,7 +1,7 @@ //! This module resolves `mod foo;` declaration to file. use arrayvec::ArrayVec; use base_db::{AnchoredPath, FileId}; -use hir_expand::{name::Name, HirFileIdExt}; +use hir_expand::{name::Name, HirFileIdExt, MacroFileIdExt}; use limit::Limit; use syntax::SmolStr; @@ -73,7 +73,7 @@ impl ModDir { Some(attr_path) => { candidate_files.push(self.dir_path.join_attr(attr_path, self.root_non_dir_owner)) } - None if file_id.is_include_macro(db.upcast()) => { + None if file_id.macro_file().map_or(false, |it| it.is_include_macro(db.upcast())) => { candidate_files.push(format!("{}.rs", name.display(db.upcast()))); candidate_files.push(format!("{}/mod.rs", name.display(db.upcast()))); } diff --git a/crates/hir-expand/src/files.rs b/crates/hir-expand/src/files.rs index 174e59056981b..7e55f6be1e412 100644 --- a/crates/hir-expand/src/files.rs +++ b/crates/hir-expand/src/files.rs @@ -6,9 +6,9 @@ use base_db::{ FileId, FileRange, }; use either::Either; -use syntax::{AstNode, SyntaxNode, SyntaxToken, TextRange}; +use syntax::{AstNode, SyntaxNode, SyntaxToken, TextRange, TextSize}; -use crate::{db, ExpansionInfo, HirFileIdExt as _}; +use crate::{db, ExpansionInfo, MacroFileIdExt}; /// `InFile` stores a value of `T` inside a particular file/syntax tree. /// @@ -119,16 +119,6 @@ impl InFileWrapper { // region:specific impls impl InFile<&SyntaxNode> { - pub fn ancestors_with_macros( - self, - db: &dyn db::ExpandDatabase, - ) -> impl Iterator> + Clone + '_ { - iter::successors(Some(self.cloned()), move |node| match node.value.parent() { - Some(parent) => Some(node.with_value(parent)), - None => node.file_id.call_node(db), - }) - } - /// Skips the attributed item that caused the macro invocation we are climbing up pub fn ancestors_with_macros_skip_attr_item( self, @@ -137,8 +127,9 @@ impl InFile<&SyntaxNode> { let succ = move |node: &InFile| match node.value.parent() { Some(parent) => Some(node.with_value(parent)), None => { - let parent_node = node.file_id.call_node(db)?; - if node.file_id.is_attr_macro(db) { + let macro_file_id = node.file_id.macro_file()?; + let parent_node = macro_file_id.call_node(db); + if macro_file_id.is_attr_macro(db) { // macro call was an attributed item, skip it // FIXME: does this fail if this is a direct expansion of another macro? parent_node.map(|node| node.parent()).transpose() @@ -222,7 +213,7 @@ impl InFile<&SyntaxNode> { } HirFileIdRepr::MacroFile(m) => m, }; - if !self.file_id.is_attr_macro(db) { + if !file_id.is_attr_macro(db) { return None; } @@ -243,21 +234,23 @@ impl InFile<&SyntaxNode> { } } -impl InFile { +impl InMacroFile { pub fn upmap_once( self, db: &dyn db::ExpandDatabase, - ) -> Option>> { - Some(self.file_id.expansion_info(db)?.map_range_up_once(db, self.value.text_range())) + ) -> InFile> { + self.file_id.expansion_info(db).map_range_up_once(db, self.value.text_range()) } +} +impl InFile { /// Falls back to the macro call range if the node cannot be mapped up fully. 
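The pattern running through this "Simplify" commit, reduced to a sketch: predicates such as `is_attr_macro` used to live on `HirFileId` and silently answer `false` for ordinary files; moving them onto `MacroFileId` deletes all the `None => false` arms and pushes the single `macro_file()` check to the caller, as in the mod_resolution change above. Stand-in types below; the real methods also take the expansion database:

```rust
#[derive(Copy, Clone)]
struct MacroFileId(u32);

#[derive(Copy, Clone)]
enum HirFileId {
    File(u32),
    Macro(MacroFileId),
}

impl HirFileId {
    fn macro_file(self) -> Option<MacroFileId> {
        match self {
            HirFileId::Macro(it) => Some(it),
            HirFileId::File(_) => None,
        }
    }
}

impl MacroFileId {
    // placeholder body; the real check looks up the interned macro call
    fn is_attr_macro(self) -> bool {
        self.0 == 1
    }
}

fn main() {
    let file = HirFileId::Macro(MacroFileId(1));
    // the caller now decides what a non-macro file means, exactly once:
    let is_attr = file.macro_file().map_or(false, |it| it.is_attr_macro());
    assert!(is_attr);
}
```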
pub fn original_file_range(self, db: &dyn db::ExpandDatabase) -> FileRange { match self.file_id.repr() { HirFileIdRepr::FileId(file_id) => FileRange { file_id, range: self.value.text_range() }, HirFileIdRepr::MacroFile(mac_file) => { let (range, ctxt) = ExpansionInfo::new(db, mac_file) - .map_token_range_up(db, self.value.text_range()); + .span_for_offset(db, self.value.text_range().start()); // FIXME: Figure out an API that makes proper use of ctx, this only exists to // keep pre-token map rewrite behaviour. @@ -280,7 +273,7 @@ impl InFile { } HirFileIdRepr::MacroFile(mac_file) => { let (range, ctxt) = ExpansionInfo::new(db, mac_file) - .map_token_range_up(db, self.value.text_range()); + .span_for_offset(db, self.value.text_range().start()); // FIXME: Figure out an API that makes proper use of ctx, this only exists to // keep pre-token map rewrite behaviour. @@ -294,20 +287,13 @@ impl InFile { } } -impl InFile { - /// Attempts to map the syntax node back up its macro calls. - pub fn original_file_range(self, db: &dyn db::ExpandDatabase) -> FileRange { - let (range, _ctxt) = match self.file_id.repr() { - HirFileIdRepr::FileId(file_id) => { - (FileRange { file_id, range: self.value }, SyntaxContextId::ROOT) - } - HirFileIdRepr::MacroFile(m) => { - ExpansionInfo::new(db, m).map_token_range_up(db, self.value) - } - }; - range +impl InMacroFile { + pub fn original_file_range(self, db: &dyn db::ExpandDatabase) -> (FileRange, SyntaxContextId) { + ExpansionInfo::new(db, self.file_id).span_for_offset(db, self.value) } +} +impl InFile { pub fn original_node_file_range( self, db: &dyn db::ExpandDatabase, @@ -353,7 +339,7 @@ impl InFile { } HirFileIdRepr::MacroFile(m) => m, }; - if !self.file_id.is_attr_macro(db) { + if !file_id.is_attr_macro(db) { return None; } diff --git a/crates/hir-expand/src/lib.rs b/crates/hir-expand/src/lib.rs index 167ba0eb5da1c..fe336aa1421e2 100644 --- a/crates/hir-expand/src/lib.rs +++ b/crates/hir-expand/src/lib.rs @@ -179,9 +179,6 @@ pub trait HirFileIdExt { /// one of the calls comes from an `include!``. fn original_file_respecting_includes(self, db: &dyn db::ExpandDatabase) -> FileId; - /// If this is a macro call, returns the syntax node of the call. - fn call_node(self, db: &dyn db::ExpandDatabase) -> Option>; - /// If this is a macro call, returns the syntax node of the very first macro call this file resides in. fn original_call_node(self, db: &dyn db::ExpandDatabase) -> Option>; @@ -190,19 +187,6 @@ pub trait HirFileIdExt { fn as_builtin_derive_attr_node(&self, db: &dyn db::ExpandDatabase) -> Option>; - fn is_builtin_derive(&self, db: &dyn db::ExpandDatabase) -> bool; - fn is_custom_derive(&self, db: &dyn db::ExpandDatabase) -> bool; - - /// Return whether this file is an include macro - fn is_include_macro(&self, db: &dyn db::ExpandDatabase) -> bool; - - fn is_eager(&self, db: &dyn db::ExpandDatabase) -> bool; - /// Return whether this file is an attr macro - fn is_attr_macro(&self, db: &dyn db::ExpandDatabase) -> bool; - - /// Return whether this file is the pseudo expansion of the derive attribute. - /// See [`crate::builtin_attr_macro::derive_attr_expand`]. 
- fn is_derive_attr_pseudo_expansion(&self, db: &dyn db::ExpandDatabase) -> bool; } impl HirFileIdExt for HirFileId { @@ -241,12 +225,6 @@ impl HirFileIdExt for HirFileId { } } - fn call_node(self, db: &dyn db::ExpandDatabase) -> Option> { - let macro_file = self.macro_file()?; - let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id); - Some(loc.to_node(db)) - } - fn original_call_node(self, db: &dyn db::ExpandDatabase) -> Option> { let mut call = db.lookup_intern_macro_call(self.macro_file()?.macro_call_id).to_node(db); loop { @@ -278,77 +256,34 @@ impl HirFileIdExt for HirFileId { }; Some(attr.with_value(ast::Attr::cast(attr.value.clone())?)) } +} - fn is_custom_derive(&self, db: &dyn db::ExpandDatabase) -> bool { - match self.macro_file() { - Some(macro_file) => { - matches!( - db.lookup_intern_macro_call(macro_file.macro_call_id).def.kind, - MacroDefKind::ProcMacro(_, ProcMacroKind::CustomDerive, _) - ) - } - None => false, - } - } - - fn is_builtin_derive(&self, db: &dyn db::ExpandDatabase) -> bool { - match self.macro_file() { - Some(macro_file) => { - matches!( - db.lookup_intern_macro_call(macro_file.macro_call_id).def.kind, - MacroDefKind::BuiltInDerive(..) - ) - } - None => false, - } - } +pub trait MacroFileIdExt { + fn expansion_level(self, db: &dyn db::ExpandDatabase) -> u32; + /// If this is a macro call, returns the syntax node of the call. + fn call_node(self, db: &dyn db::ExpandDatabase) -> InFile; - fn is_include_macro(&self, db: &dyn db::ExpandDatabase) -> bool { - match self.macro_file() { - Some(macro_file) => { - db.lookup_intern_macro_call(macro_file.macro_call_id).def.is_include() - } - _ => false, - } - } + fn expansion_info(self, db: &dyn db::ExpandDatabase) -> ExpansionInfo; - fn is_eager(&self, db: &dyn db::ExpandDatabase) -> bool { - match self.macro_file() { - Some(macro_file) => { - let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id); - matches!(loc.def.kind, MacroDefKind::BuiltInEager(..)) - } - _ => false, - } - } + fn is_builtin_derive(&self, db: &dyn db::ExpandDatabase) -> bool; + fn is_custom_derive(&self, db: &dyn db::ExpandDatabase) -> bool; - fn is_attr_macro(&self, db: &dyn db::ExpandDatabase) -> bool { - match self.macro_file() { - Some(macro_file) => { - let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id); - matches!(loc.kind, MacroCallKind::Attr { .. }) - } - _ => false, - } - } + /// Return whether this file is an include macro + fn is_include_macro(&self, db: &dyn db::ExpandDatabase) -> bool; - fn is_derive_attr_pseudo_expansion(&self, db: &dyn db::ExpandDatabase) -> bool { - match self.macro_file() { - Some(macro_file) => { - let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id); - loc.def.is_attribute_derive() - } - None => false, - } - } -} + fn is_eager(&self, db: &dyn db::ExpandDatabase) -> bool; + /// Return whether this file is an attr macro + fn is_attr_macro(&self, db: &dyn db::ExpandDatabase) -> bool; -pub trait MacroFileIdExt { - fn expansion_level(self, db: &dyn db::ExpandDatabase) -> u32; - fn expansion_info(self, db: &dyn db::ExpandDatabase) -> ExpansionInfo; + /// Return whether this file is the pseudo expansion of the derive attribute. + /// See [`crate::builtin_attr_macro::derive_attr_expand`]. 
+ fn is_derive_attr_pseudo_expansion(&self, db: &dyn db::ExpandDatabase) -> bool; } impl MacroFileIdExt for MacroFileId { + fn call_node(self, db: &dyn db::ExpandDatabase) -> InFile { + db.lookup_intern_macro_call(self.macro_call_id).to_node(db) + } fn expansion_level(self, db: &dyn db::ExpandDatabase) -> u32 { let mut level = 0; let mut macro_file = self; @@ -367,6 +302,39 @@ impl MacroFileIdExt for MacroFileId { fn expansion_info(self, db: &dyn db::ExpandDatabase) -> ExpansionInfo { ExpansionInfo::new(db, self) } + + fn is_custom_derive(&self, db: &dyn db::ExpandDatabase) -> bool { + matches!( + db.lookup_intern_macro_call(self.macro_call_id).def.kind, + MacroDefKind::ProcMacro(_, ProcMacroKind::CustomDerive, _) + ) + } + + fn is_builtin_derive(&self, db: &dyn db::ExpandDatabase) -> bool { + matches!( + db.lookup_intern_macro_call(self.macro_call_id).def.kind, + MacroDefKind::BuiltInDerive(..) + ) + } + + fn is_include_macro(&self, db: &dyn db::ExpandDatabase) -> bool { + db.lookup_intern_macro_call(self.macro_call_id).def.is_include() + } + + fn is_eager(&self, db: &dyn db::ExpandDatabase) -> bool { + let loc: MacroCallLoc = db.lookup_intern_macro_call(self.macro_call_id); + matches!(loc.def.kind, MacroDefKind::BuiltInEager(..)) + } + + fn is_attr_macro(&self, db: &dyn db::ExpandDatabase) -> bool { + let loc: MacroCallLoc = db.lookup_intern_macro_call(self.macro_call_id); + matches!(loc.kind, MacroCallKind::Attr { .. }) + } + + fn is_derive_attr_pseudo_expansion(&self, db: &dyn db::ExpandDatabase) -> bool { + let loc: MacroCallLoc = db.lookup_intern_macro_call(self.macro_call_id); + loc.def.is_attribute_derive() + } } impl MacroDefId { @@ -653,14 +621,14 @@ impl ExpansionInfo { Some(tokens.map(move |token| InMacroFile::new(self.expanded.file_id, token))) } - /// Maps up the text range out of the expansion hierarchy back into the original file its from. - pub fn map_token_range_up( + /// Looks up the span at the given offset. 
+ pub fn span_for_offset( &self, db: &dyn db::ExpandDatabase, - range: TextRange, + offset: TextSize, ) -> (FileRange, SyntaxContextId) { - debug_assert!(self.expanded.value.text_range().contains_range(range)); - let span = self.exp_map.span_at(range.start()); + debug_assert!(self.expanded.value.text_range().contains(offset)); + let span = self.exp_map.span_at(offset); let anchor_offset = db .ast_id_map(span.anchor.file_id.into()) .get_erased(span.anchor.ast_id) diff --git a/crates/hir-ty/src/diagnostics/decl_check.rs b/crates/hir-ty/src/diagnostics/decl_check.rs index c2ff487ef91d6..51a044d8ef562 100644 --- a/crates/hir-ty/src/diagnostics/decl_check.rs +++ b/crates/hir-ty/src/diagnostics/decl_check.rs @@ -24,7 +24,7 @@ use hir_def::{ }; use hir_expand::{ name::{AsName, Name}, - HirFileId, HirFileIdExt, + HirFileId, MacroFileIdExt, }; use stdx::{always, never}; use syntax::{ @@ -196,7 +196,7 @@ impl<'a> DeclValidator<'a> { AttrDefId::GenericParamId(_) => None, } .map_or(false, |file_id| { - file_id.is_custom_derive(db.upcast()) || file_id.is_builtin_derive(db.upcast()) + matches!(file_id.macro_file(), Some(file_id) if file_id.is_custom_derive(db.upcast()) || file_id.is_builtin_derive(db.upcast())) }) }; diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs index 7e210740eac0c..4a0c384e8a305 100644 --- a/crates/hir/src/lib.rs +++ b/crates/hir/src/lib.rs @@ -128,6 +128,7 @@ pub use { hygiene::{marks_rev, SyntaxContextExt}, name::{known, Name}, tt, ExpandResult, HirFileId, HirFileIdExt, InFile, InMacroFile, InRealFile, MacroFileId, + MacroFileIdExt, }, hir_ty::{ display::{ClosureStyle, HirDisplay, HirDisplayError, HirWrite}, @@ -607,7 +608,7 @@ impl Module { let tree = loc.id.item_tree(db.upcast()); let node = &tree[loc.id.value]; let file_id = loc.id.file_id(); - if file_id.is_builtin_derive(db.upcast()) { + if file_id.macro_file().map_or(false, |it| it.is_builtin_derive(db.upcast())) { // these expansion come from us, diagnosing them is a waste of resources // FIXME: Once we diagnose the inputs to builtin derives, we should at least extract those diagnostics somehow continue; diff --git a/crates/hir/src/semantics.rs b/crates/hir/src/semantics.rs index ac70c27785c52..46835ec04e01b 100644 --- a/crates/hir/src/semantics.rs +++ b/crates/hir/src/semantics.rs @@ -20,8 +20,8 @@ use hir_def::{ AsMacroCall, DefWithBodyId, FieldId, FunctionId, MacroId, TraitId, VariantId, }; use hir_expand::{ - db::ExpandDatabase, files::InRealFile, name::AsName, ExpansionInfo, HirFileIdExt, MacroCallId, - MacroFileId, MacroFileIdExt, + db::ExpandDatabase, files::InRealFile, name::AsName, ExpansionInfo, MacroCallId, MacroFileId, + MacroFileIdExt, }; use itertools::Itertools; use rustc_hash::{FxHashMap, FxHashSet}; @@ -865,7 +865,7 @@ impl<'db> SemanticsImpl<'db> { Some(parent) => Some(InFile::new(file_id, parent)), None => { self.cache(value.clone(), file_id); - file_id.call_node(db) + Some(file_id.macro_file()?.call_node(db)) } } }) diff --git a/crates/hir/src/source_analyzer.rs b/crates/hir/src/source_analyzer.rs index 8afa7e0659475..7a31e6df1faee 100644 --- a/crates/hir/src/source_analyzer.rs +++ b/crates/hir/src/source_analyzer.rs @@ -29,7 +29,7 @@ use hir_expand::{ mod_path::path, name, name::{AsName, Name}, - HirFileId, HirFileIdExt, InFile, MacroFileId, MacroFileIdExt, + HirFileId, InFile, MacroFileId, MacroFileIdExt, }; use hir_ty::{ diagnostics::{ @@ -939,11 +939,12 @@ fn scope_for_offset( } // FIXME handle attribute expansion - let source = iter::successors(file_id.call_node(db.upcast()), |it| { - 
it.file_id.call_node(db.upcast()) - }) - .find(|it| it.file_id == from_file) - .filter(|it| it.value.kind() == SyntaxKind::MACRO_CALL)?; + let source = + iter::successors(file_id.macro_file().map(|it| it.call_node(db.upcast())), |it| { + Some(it.file_id.macro_file()?.call_node(db.upcast())) + }) + .find(|it| it.file_id == from_file) + .filter(|it| it.value.kind() == SyntaxKind::MACRO_CALL)?; Some((source.value.text_range(), scope)) }) .filter(|(expr_range, _scope)| expr_range.start() <= offset && offset <= expr_range.end()) diff --git a/crates/hir/src/symbols.rs b/crates/hir/src/symbols.rs index 2b03d575cb277..03112f6de5afe 100644 --- a/crates/hir/src/symbols.rs +++ b/crates/hir/src/symbols.rs @@ -49,10 +49,6 @@ impl DeclarationLocation { let node = resolve_node(db, self.hir_file_id, &self.ptr); node.as_ref().original_file_range(db.upcast()) } - - pub fn original_name_range(&self, db: &dyn HirDatabase) -> FileRange { - InFile::new(self.hir_file_id, self.name_ptr.text_range()).original_file_range(db.upcast()) - } } fn resolve_node( diff --git a/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs b/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs index 1b373bcb8ce93..b54e4204e3f39 100644 --- a/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs +++ b/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs @@ -1,4 +1,4 @@ -use hir::{HirFileIdExt, InFile, ModuleDef}; +use hir::{InFile, MacroFileIdExt, ModuleDef}; use ide_db::{helpers::mod_path_to_ast, imports::import_assets::NameToImport, items_locator}; use itertools::Itertools; use syntax::{ @@ -43,12 +43,12 @@ pub(crate) fn replace_derive_with_manual_impl( ) -> Option<()> { let attr = ctx.find_node_at_offset_with_descend::()?; let path = attr.path()?; - let hir_file = ctx.sema.hir_file_for(attr.syntax()); - if !hir_file.is_derive_attr_pseudo_expansion(ctx.db()) { + let macro_file = ctx.sema.hir_file_for(attr.syntax()).macro_file()?; + if !macro_file.is_derive_attr_pseudo_expansion(ctx.db()) { return None; } - let InFile { file_id, value } = hir_file.call_node(ctx.db())?; + let InFile { file_id, value } = macro_file.call_node(ctx.db()); if file_id.is_macro() { // FIXME: make this work in macro files return None; @@ -56,7 +56,7 @@ pub(crate) fn replace_derive_with_manual_impl( // collect the derive paths from the #[derive] expansion let current_derives = ctx .sema - .parse_or_expand(hir_file) + .parse_or_expand(macro_file.into()) .descendants() .filter_map(ast::Attr::cast) .filter_map(|attr| attr.path()) diff --git a/crates/ide/src/expand_macro.rs b/crates/ide/src/expand_macro.rs index 653d9f0883840..024053effe423 100644 --- a/crates/ide/src/expand_macro.rs +++ b/crates/ide/src/expand_macro.rs @@ -1,4 +1,4 @@ -use hir::{DescendPreference, HirFileIdExt, InFile, Semantics}; +use hir::{DescendPreference, InFile, MacroFileIdExt, Semantics}; use ide_db::{ base_db::FileId, helpers::pick_best_token, syntax_helpers::insert_whitespace_into_node::insert_ws_into, RootDatabase, @@ -44,15 +44,15 @@ pub(crate) fn expand_macro(db: &RootDatabase, position: FilePosition) -> Option< .descend_into_macros(DescendPreference::None, tok.clone()) .into_iter() .find_map(|descended| { - let hir_file = sema.hir_file_for(&descended.parent()?); - if !hir_file.is_derive_attr_pseudo_expansion(db) { + let macro_file = sema.hir_file_for(&descended.parent()?).macro_file()?; + if !macro_file.is_derive_attr_pseudo_expansion(db) { return None; } let name = 
descended.parent_ancestors().filter_map(ast::Path::cast).last()?.to_string(); // up map out of the #[derive] expansion let InFile { file_id, value: tokens } = - hir::InFile::new(hir_file, descended).upmap_once(db)?; + hir::InMacroFile::new(macro_file, descended).upmap_once(db); let token = sema.parse_or_expand(file_id).covering_element(tokens[0]).into_token()?; let attr = token.parent_ancestors().find_map(ast::Attr::cast)?; let expansions = sema.expand_derive_macro(&attr)?; diff --git a/crates/ide/src/runnables.rs b/crates/ide/src/runnables.rs index c0b556f5441e7..d334e66d3dd6e 100644 --- a/crates/ide/src/runnables.rs +++ b/crates/ide/src/runnables.rs @@ -142,7 +142,7 @@ pub(crate) fn runnables(db: &RootDatabase, file_id: FileId) -> Vec { Definition::Function(it) => it.source(db).map(|src| src.file_id), _ => None, }; - if let Some(file_id) = file_id.filter(|file| file.call_node(db).is_some()) { + if let Some(file_id) = file_id.filter(|file| file.macro_file().is_some()) { in_macro_expansion.entry(file_id).or_default().push(runnable); return; } diff --git a/crates/ide/src/syntax_highlighting/highlight.rs b/crates/ide/src/syntax_highlighting/highlight.rs index d510c11c3d5a9..0558f658fd190 100644 --- a/crates/ide/src/syntax_highlighting/highlight.rs +++ b/crates/ide/src/syntax_highlighting/highlight.rs @@ -1,6 +1,6 @@ //! Computes color for a single element. -use hir::{AsAssocItem, HasVisibility, HirFileIdExt, Semantics}; +use hir::{AsAssocItem, HasVisibility, MacroFileIdExt, Semantics}; use ide_db::{ defs::{Definition, IdentClass, NameClass, NameRefClass}, FxHashMap, RootDatabase, SymbolKind, @@ -218,7 +218,10 @@ fn highlight_name_ref( // We can fix this for derive attributes since derive helpers are recorded, but not for // general attributes. None if name_ref.syntax().ancestors().any(|it| it.kind() == ATTR) - && !sema.hir_file_for(name_ref.syntax()).is_derive_attr_pseudo_expansion(sema.db) => + && !sema + .hir_file_for(name_ref.syntax()) + .macro_file() + .map_or(false, |it| it.is_derive_attr_pseudo_expansion(sema.db)) => { return HlTag::Symbol(SymbolKind::Attribute).into(); } From ba01ff4f88ba5573d2760916e1fdf21d3121686f Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Wed, 6 Dec 2023 14:36:45 +0100 Subject: [PATCH 47/80] Fix diagnostics panicking when resolving to different files due to macros --- .../src/handlers/field_shorthand.rs | 9 ++- .../src/handlers/inactive_code.rs | 2 +- .../src/handlers/invalid_derive_target.rs | 2 +- .../src/handlers/json_is_not_rust.rs | 4 +- .../src/handlers/macro_error.rs | 20 ++++++ .../src/handlers/malformed_derive.rs | 2 +- .../src/handlers/mismatched_arg_count.rs | 5 +- .../src/handlers/type_mismatch.rs | 21 +++--- .../src/handlers/typed_hole.rs | 2 +- .../src/handlers/unlinked_file.rs | 10 ++- .../src/handlers/unresolved_module.rs | 7 +- .../src/handlers/useless_braces.rs | 7 +- crates/ide-diagnostics/src/lib.rs | 26 ++++--- crates/ide-diagnostics/src/tests.rs | 49 +++++++------ crates/rust-analyzer/src/diagnostics.rs | 69 +++++++++++++++---- 15 files changed, 158 insertions(+), 77 deletions(-) diff --git a/crates/ide-diagnostics/src/handlers/field_shorthand.rs b/crates/ide-diagnostics/src/handlers/field_shorthand.rs index 9ed8199ae4d0c..45fc6f8e68d0e 100644 --- a/crates/ide-diagnostics/src/handlers/field_shorthand.rs +++ b/crates/ide-diagnostics/src/handlers/field_shorthand.rs @@ -1,7 +1,10 @@ //! Suggests shortening `Foo { field: field }` to `Foo { field }` in both //! expressions and patterns. 
-use ide_db::{base_db::FileId, source_change::SourceChange}; +use ide_db::{ + base_db::{FileId, FileRange}, + source_change::SourceChange, +}; use syntax::{ast, match_ast, AstNode, SyntaxNode}; use text_edit::TextEdit; @@ -49,7 +52,7 @@ fn check_expr_field_shorthand( Diagnostic::new( DiagnosticCode::Clippy("redundant_field_names"), "Shorthand struct initialization", - field_range, + FileRange { file_id, range: field_range }, ) .with_fixes(Some(vec![fix( "use_expr_field_shorthand", @@ -93,7 +96,7 @@ fn check_pat_field_shorthand( Diagnostic::new( DiagnosticCode::Clippy("redundant_field_names"), "Shorthand struct pattern", - field_range, + FileRange { file_id, range: field_range }, ) .with_fixes(Some(vec![fix( "use_pat_field_shorthand", diff --git a/crates/ide-diagnostics/src/handlers/inactive_code.rs b/crates/ide-diagnostics/src/handlers/inactive_code.rs index 9eb763d3e2c23..3b2e15a17887b 100644 --- a/crates/ide-diagnostics/src/handlers/inactive_code.rs +++ b/crates/ide-diagnostics/src/handlers/inactive_code.rs @@ -31,7 +31,7 @@ pub(crate) fn inactive_code( let res = Diagnostic::new( DiagnosticCode::Ra("inactive-code", Severity::WeakWarning), message, - ctx.sema.diagnostics_display_range(d.node.clone()).range, + ctx.sema.diagnostics_display_range(d.node.clone()), ) .with_unused(true); Some(res) diff --git a/crates/ide-diagnostics/src/handlers/invalid_derive_target.rs b/crates/ide-diagnostics/src/handlers/invalid_derive_target.rs index 1ec17952b238d..f68f5b44b11bd 100644 --- a/crates/ide-diagnostics/src/handlers/invalid_derive_target.rs +++ b/crates/ide-diagnostics/src/handlers/invalid_derive_target.rs @@ -8,7 +8,7 @@ pub(crate) fn invalid_derive_target( ctx: &DiagnosticsContext<'_>, d: &hir::InvalidDeriveTarget, ) -> Diagnostic { - let display_range = ctx.sema.diagnostics_display_range(d.node.clone()).range; + let display_range = ctx.sema.diagnostics_display_range(d.node.clone()); Diagnostic::new( DiagnosticCode::RustcHardError("E0774"), diff --git a/crates/ide-diagnostics/src/handlers/json_is_not_rust.rs b/crates/ide-diagnostics/src/handlers/json_is_not_rust.rs index 659b74445f8fe..d330973aaaa30 100644 --- a/crates/ide-diagnostics/src/handlers/json_is_not_rust.rs +++ b/crates/ide-diagnostics/src/handlers/json_is_not_rust.rs @@ -3,7 +3,7 @@ use hir::{PathResolution, Semantics}; use ide_db::{ - base_db::FileId, + base_db::{FileId, FileRange}, helpers::mod_path_to_ast, imports::insert_use::{insert_use, ImportScope}, source_change::SourceChangeBuilder, @@ -119,7 +119,7 @@ pub(crate) fn json_in_items( Diagnostic::new( DiagnosticCode::Ra("json-is-not-rust", Severity::WeakWarning), "JSON syntax is not valid as a Rust item", - range, + FileRange { file_id, range }, ) .with_fixes(Some(vec![{ let mut scb = SourceChangeBuilder::new(file_id); diff --git a/crates/ide-diagnostics/src/handlers/macro_error.rs b/crates/ide-diagnostics/src/handlers/macro_error.rs index 2993950be04a0..099de4528d468 100644 --- a/crates/ide-diagnostics/src/handlers/macro_error.rs +++ b/crates/ide-diagnostics/src/handlers/macro_error.rs @@ -264,4 +264,24 @@ fn f() { "#, ) } + + #[test] + fn include_does_not_break_diagnostics() { + let mut config = DiagnosticsConfig::test_sample(); + config.disabled.insert("inactive-code".to_string()); + config.disabled.insert("unlinked-file".to_string()); + check_diagnostics_with_config( + config, + r#" +//- minicore: include +//- /lib.rs crate:lib +include!("include-me.rs"); +//- /include-me.rs +/// long doc that pushes the diagnostic range beyond the first file's text length + #[err] 
+//^^^^^^error: unresolved macro `err` +mod prim_never {} +"#, + ); + } } diff --git a/crates/ide-diagnostics/src/handlers/malformed_derive.rs b/crates/ide-diagnostics/src/handlers/malformed_derive.rs index fc57dde69f2a0..6202d15853967 100644 --- a/crates/ide-diagnostics/src/handlers/malformed_derive.rs +++ b/crates/ide-diagnostics/src/handlers/malformed_derive.rs @@ -7,7 +7,7 @@ pub(crate) fn malformed_derive( ctx: &DiagnosticsContext<'_>, d: &hir::MalformedDerive, ) -> Diagnostic { - let display_range = ctx.sema.diagnostics_display_range(d.node.clone()).range; + let display_range = ctx.sema.diagnostics_display_range(d.node.clone()); Diagnostic::new( DiagnosticCode::RustcHardError("E0777"), diff --git a/crates/ide-diagnostics/src/handlers/mismatched_arg_count.rs b/crates/ide-diagnostics/src/handlers/mismatched_arg_count.rs index 06ba13bcc55c4..8296018022cb2 100644 --- a/crates/ide-diagnostics/src/handlers/mismatched_arg_count.rs +++ b/crates/ide-diagnostics/src/handlers/mismatched_arg_count.rs @@ -1,8 +1,9 @@ use either::Either; use hir::InFile; +use ide_db::base_db::FileRange; use syntax::{ ast::{self, HasArgList}, - AstNode, SyntaxNodePtr, TextRange, + AstNode, SyntaxNodePtr, }; use crate::{adjusted_display_range, Diagnostic, DiagnosticCode, DiagnosticsContext}; @@ -48,7 +49,7 @@ fn invalid_args_range( source: InFile, expected: usize, found: usize, -) -> TextRange { +) -> FileRange { adjusted_display_range::>(ctx, source, &|expr| { let (text_range, r_paren_token, expected_arg) = match expr { Either::Left(ast::Expr::CallExpr(call)) => { diff --git a/crates/ide-diagnostics/src/handlers/type_mismatch.rs b/crates/ide-diagnostics/src/handlers/type_mismatch.rs index fd00535d0c34f..70beb9468938c 100644 --- a/crates/ide-diagnostics/src/handlers/type_mismatch.rs +++ b/crates/ide-diagnostics/src/handlers/type_mismatch.rs @@ -35,14 +35,10 @@ pub(crate) fn type_mismatch(ctx: &DiagnosticsContext<'_>, d: &hir::TypeMismatch) Some(salient_token_range) }, ), - pat => { - ctx.sema - .diagnostics_display_range(InFile { - file_id: d.expr_or_pat.file_id, - value: pat.syntax_node_ptr(), - }) - .range - } + pat => ctx.sema.diagnostics_display_range(InFile { + file_id: d.expr_or_pat.file_id, + value: pat.syntax_node_ptr(), + }), }; let mut diag = Diagnostic::new( DiagnosticCode::RustcHardError("E0308"), @@ -84,7 +80,7 @@ fn add_reference( expr_ptr: &InFile>, acc: &mut Vec, ) -> Option<()> { - let range = ctx.sema.diagnostics_display_range(expr_ptr.clone().map(|it| it.into())).range; + let range = ctx.sema.diagnostics_display_range(expr_ptr.clone().map(|it| it.into())); let (_, mutability) = d.expected.as_reference()?; let actual_with_ref = Type::reference(&d.actual, mutability); @@ -94,10 +90,9 @@ fn add_reference( let ampersands = format!("&{}", mutability.as_keyword_for_ref()); - let edit = TextEdit::insert(range.start(), ampersands); - let source_change = - SourceChange::from_text_edit(expr_ptr.file_id.original_file(ctx.sema.db), edit); - acc.push(fix("add_reference_here", "Add reference here", source_change, range)); + let edit = TextEdit::insert(range.range.start(), ampersands); + let source_change = SourceChange::from_text_edit(range.file_id, edit); + acc.push(fix("add_reference_here", "Add reference here", source_change, range.range)); Some(()) } diff --git a/crates/ide-diagnostics/src/handlers/typed_hole.rs b/crates/ide-diagnostics/src/handlers/typed_hole.rs index ea5c7564d343f..a740e332bbddf 100644 --- a/crates/ide-diagnostics/src/handlers/typed_hole.rs +++ 
b/crates/ide-diagnostics/src/handlers/typed_hole.rs @@ -26,7 +26,7 @@ pub(crate) fn typed_hole(ctx: &DiagnosticsContext<'_>, d: &hir::TypedHole) -> Di ) }; - Diagnostic::new(DiagnosticCode::RustcHardError("typed-hole"), message, display_range.range) + Diagnostic::new(DiagnosticCode::RustcHardError("typed-hole"), message, display_range) .with_fixes(fixes) } diff --git a/crates/ide-diagnostics/src/handlers/unlinked_file.rs b/crates/ide-diagnostics/src/handlers/unlinked_file.rs index e04f27c27fdf7..becc24ab21ecb 100644 --- a/crates/ide-diagnostics/src/handlers/unlinked_file.rs +++ b/crates/ide-diagnostics/src/handlers/unlinked_file.rs @@ -4,7 +4,7 @@ use std::iter; use hir::{db::DefDatabase, DefMap, InFile, ModuleSource}; use ide_db::{ - base_db::{FileId, FileLoader, SourceDatabase, SourceDatabaseExt}, + base_db::{FileId, FileLoader, FileRange, SourceDatabase, SourceDatabaseExt}, source_change::SourceChange, RootDatabase, }; @@ -46,8 +46,12 @@ pub(crate) fn unlinked_file( .unwrap_or(range); acc.push( - Diagnostic::new(DiagnosticCode::Ra("unlinked-file", Severity::WeakWarning), message, range) - .with_fixes(fixes), + Diagnostic::new( + DiagnosticCode::Ra("unlinked-file", Severity::WeakWarning), + message, + FileRange { file_id, range }, + ) + .with_fixes(fixes), ); } diff --git a/crates/ide-diagnostics/src/handlers/unresolved_module.rs b/crates/ide-diagnostics/src/handlers/unresolved_module.rs index 4f81ec0512589..e90d385bab8c5 100644 --- a/crates/ide-diagnostics/src/handlers/unresolved_module.rs +++ b/crates/ide-diagnostics/src/handlers/unresolved_module.rs @@ -87,7 +87,12 @@ mod baz {} "E0583", ), message: "unresolved module, can't find module file: foo.rs, or foo/mod.rs", - range: 0..8, + range: FileRange { + file_id: FileId( + 0, + ), + range: 0..8, + }, severity: Error, unused: false, experimental: false, diff --git a/crates/ide-diagnostics/src/handlers/useless_braces.rs b/crates/ide-diagnostics/src/handlers/useless_braces.rs index c4ac59ec2a4d0..8dce2af23e328 100644 --- a/crates/ide-diagnostics/src/handlers/useless_braces.rs +++ b/crates/ide-diagnostics/src/handlers/useless_braces.rs @@ -1,5 +1,8 @@ use hir::InFile; -use ide_db::{base_db::FileId, source_change::SourceChange}; +use ide_db::{ + base_db::{FileId, FileRange}, + source_change::SourceChange, +}; use itertools::Itertools; use syntax::{ast, AstNode, SyntaxNode}; use text_edit::TextEdit; @@ -38,7 +41,7 @@ pub(crate) fn useless_braces( Diagnostic::new( DiagnosticCode::RustcLint("unused_braces"), "Unnecessary braces in use statement".to_string(), - use_range, + FileRange { file_id, range: use_range }, ) .with_main_node(InFile::new(file_id.into(), node.clone())) .with_fixes(Some(vec![fix( diff --git a/crates/ide-diagnostics/src/lib.rs b/crates/ide-diagnostics/src/lib.rs index f9fb921f4053a..35272e8726458 100644 --- a/crates/ide-diagnostics/src/lib.rs +++ b/crates/ide-diagnostics/src/lib.rs @@ -133,7 +133,7 @@ impl DiagnosticCode { pub struct Diagnostic { pub code: DiagnosticCode, pub message: String, - pub range: TextRange, + pub range: FileRange, pub severity: Severity, pub unused: bool, pub experimental: bool, @@ -143,7 +143,7 @@ pub struct Diagnostic { } impl Diagnostic { - fn new(code: DiagnosticCode, message: impl Into, range: TextRange) -> Diagnostic { + fn new(code: DiagnosticCode, message: impl Into, range: FileRange) -> Diagnostic { let message = message.into(); Diagnostic { code, @@ -172,7 +172,7 @@ impl Diagnostic { node: InFile, ) -> Diagnostic { let file_id = node.file_id; - Diagnostic::new(code, message, 
ctx.sema.diagnostics_display_range(node.clone()).range) + Diagnostic::new(code, message, ctx.sema.diagnostics_display_range(node.clone())) .with_main_node(node.map(|x| x.to_node(&ctx.sema.parse_or_expand(file_id)))) } @@ -267,7 +267,7 @@ impl DiagnosticsContext<'_> { &self, node: &InFile, precise_location: Option, - ) -> TextRange { + ) -> FileRange { let sema = &self.sema; (|| { let precise_location = precise_location?; @@ -280,10 +280,11 @@ impl DiagnosticsContext<'_> { } })() .unwrap_or_else(|| sema.diagnostics_display_range(node.clone())) - .range } } +/// Request diagnostics for the given [`FileId`]. The produced diagnostics may point to other files +/// due to macros. pub fn diagnostics( db: &RootDatabase, config: &DiagnosticsConfig, @@ -300,7 +301,7 @@ pub fn diagnostics( Diagnostic::new( DiagnosticCode::RustcHardError("syntax-error"), format!("Syntax Error: {err}"), - err.range(), + FileRange { file_id, range: err.range() }, ) })); @@ -569,12 +570,15 @@ fn adjusted_display_range( ctx: &DiagnosticsContext<'_>, diag_ptr: InFile, adj: &dyn Fn(N) -> Option, -) -> TextRange { +) -> FileRange { let FileRange { file_id, range } = ctx.sema.diagnostics_display_range(diag_ptr); let source_file = ctx.sema.db.parse(file_id); - find_node_at_range::(&source_file.syntax_node(), range) - .filter(|it| it.syntax().text_range() == range) - .and_then(adj) - .unwrap_or(range) + FileRange { + file_id, + range: find_node_at_range::(&source_file.syntax_node(), range) + .filter(|it| it.syntax().text_range() == range) + .and_then(adj) + .unwrap_or(range), + } } diff --git a/crates/ide-diagnostics/src/tests.rs b/crates/ide-diagnostics/src/tests.rs index c766a018bfd05..48e0363c9ca8d 100644 --- a/crates/ide-diagnostics/src/tests.rs +++ b/crates/ide-diagnostics/src/tests.rs @@ -7,6 +7,7 @@ use ide_db::{ base_db::{fixture::WithFixture, SourceDatabaseExt}, LineIndexDatabase, RootDatabase, }; +use itertools::Itertools; use stdx::trim_indent; use test_utils::{assert_eq_text, extract_annotations, MiniCore}; @@ -103,33 +104,39 @@ pub(crate) fn check_diagnostics(ra_fixture: &str) { #[track_caller] pub(crate) fn check_diagnostics_with_config(config: DiagnosticsConfig, ra_fixture: &str) { let (db, files) = RootDatabase::with_many_files(ra_fixture); + let mut annotations = files + .iter() + .copied() + .flat_map(|file_id| { + super::diagnostics(&db, &config, &AssistResolveStrategy::All, file_id).into_iter().map( + |d| { + let mut annotation = String::new(); + if let Some(fixes) = &d.fixes { + assert!(!fixes.is_empty()); + annotation.push_str("💡 ") + } + annotation.push_str(match d.severity { + Severity::Error => "error", + Severity::WeakWarning => "weak", + Severity::Warning => "warn", + Severity::Allow => "allow", + }); + annotation.push_str(": "); + annotation.push_str(&d.message); + (d.range, annotation) + }, + ) + }) + .map(|(diagnostic, annotation)| (diagnostic.file_id, (diagnostic.range, annotation))) + .into_group_map(); for file_id in files { let line_index = db.line_index(file_id); - let diagnostics = super::diagnostics(&db, &config, &AssistResolveStrategy::All, file_id); + let mut actual = annotations.remove(&file_id).unwrap_or_default(); let expected = extract_annotations(&db.file_text(file_id)); - let mut actual = diagnostics - .into_iter() - .map(|d| { - let mut annotation = String::new(); - if let Some(fixes) = &d.fixes { - assert!(!fixes.is_empty()); - annotation.push_str("💡 ") - } - annotation.push_str(match d.severity { - Severity::Error => "error", - Severity::WeakWarning => "weak", - Severity::Warning 
=> "warn", - Severity::Allow => "allow", - }); - annotation.push_str(": "); - annotation.push_str(&d.message); - (d.range, annotation) - }) - .collect::>(); actual.sort_by_key(|(range, _)| range.start()); if expected.is_empty() { - // makes minicore smoke test debugable + // makes minicore smoke test debuggable for (e, _) in &actual { eprintln!( "Code in range {e:?} = {}", diff --git a/crates/rust-analyzer/src/diagnostics.rs b/crates/rust-analyzer/src/diagnostics.rs index 71701ef16179f..f80beb9caadd8 100644 --- a/crates/rust-analyzer/src/diagnostics.rs +++ b/crates/rust-analyzer/src/diagnostics.rs @@ -5,6 +5,7 @@ use std::mem; use ide::FileId; use ide_db::FxHashMap; +use itertools::Itertools; use nohash_hasher::{IntMap, IntSet}; use rustc_hash::FxHashSet; use triomphe::Arc; @@ -129,8 +130,28 @@ pub(crate) fn fetch_native_diagnostics( ) -> Vec<(FileId, Vec)> { let _p = profile::span("fetch_native_diagnostics"); let _ctx = stdx::panic_context::enter("fetch_native_diagnostics".to_owned()); - subscriptions - .into_iter() + + let convert_diagnostic = + |line_index: &crate::line_index::LineIndex, d: ide::Diagnostic| lsp_types::Diagnostic { + range: lsp::to_proto::range(&line_index, d.range.range), + severity: Some(lsp::to_proto::diagnostic_severity(d.severity)), + code: Some(lsp_types::NumberOrString::String(d.code.as_str().to_string())), + code_description: Some(lsp_types::CodeDescription { + href: lsp_types::Url::parse(&d.code.url()).unwrap(), + }), + source: Some("rust-analyzer".to_string()), + message: d.message, + related_information: None, + tags: d.unused.then(|| vec![lsp_types::DiagnosticTag::UNNECESSARY]), + data: None, + }; + + // the diagnostics produced may point to different files not requested by the concrete request, + // put those into here and filter later + let mut odd_ones = Vec::new(); + let mut diagnostics = subscriptions + .iter() + .copied() .filter_map(|file_id| { let line_index = snapshot.file_line_index(file_id).ok()?; let diagnostics = snapshot @@ -142,21 +163,39 @@ pub(crate) fn fetch_native_diagnostics( ) .ok()? 
.into_iter() - .map(move |d| lsp_types::Diagnostic { - range: lsp::to_proto::range(&line_index, d.range), - severity: Some(lsp::to_proto::diagnostic_severity(d.severity)), - code: Some(lsp_types::NumberOrString::String(d.code.as_str().to_string())), - code_description: Some(lsp_types::CodeDescription { - href: lsp_types::Url::parse(&d.code.url()).unwrap(), - }), - source: Some("rust-analyzer".to_string()), - message: d.message, - related_information: None, - tags: d.unused.then(|| vec![lsp_types::DiagnosticTag::UNNECESSARY]), - data: None, + .filter_map(|d| { + if d.range.file_id == file_id { + Some(convert_diagnostic(&line_index, d)) + } else { + odd_ones.push(d); + None + } }) .collect::>(); Some((file_id, diagnostics)) }) - .collect() + .collect::>(); + + // Add back any diagnostics that point to files we are subscribed to + for (file_id, group) in odd_ones + .into_iter() + .sorted_by_key(|it| it.range.file_id) + .group_by(|it| it.range.file_id) + .into_iter() + { + if !subscriptions.contains(&file_id) { + continue; + } + let Some((_, diagnostics)) = diagnostics.iter_mut().find(|&&mut (id, _)| id == file_id) + else { + continue; + }; + let Some(line_index) = snapshot.file_line_index(file_id).ok() else { + break; + }; + for diagnostic in group { + diagnostics.push(convert_diagnostic(&line_index, diagnostic)); + } + } + diagnostics } From 374affea3c7f3b3a355b1c92a74d5d91eef98776 Mon Sep 17 00:00:00 2001 From: Johannes Hostert Date: Wed, 6 Dec 2023 15:11:45 +0100 Subject: [PATCH 48/80] make ParamLoweringMode accessible --- crates/hir-ty/src/lib.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/crates/hir-ty/src/lib.rs b/crates/hir-ty/src/lib.rs index 907a303019649..33dffafa26004 100644 --- a/crates/hir-ty/src/lib.rs +++ b/crates/hir-ty/src/lib.rs @@ -73,8 +73,8 @@ pub use infer::{ }; pub use interner::Interner; pub use lower::{ - associated_type_shorthand_candidates, CallableDefId, ImplTraitLoweringMode, TyDefId, - TyLoweringContext, ValueTyDefId, + associated_type_shorthand_candidates, CallableDefId, ImplTraitLoweringMode, ParamLoweringMode, + TyDefId, TyLoweringContext, ValueTyDefId, }; pub use mapping::{ from_assoc_type_id, from_chalk_trait_id, from_foreign_def_id, from_placeholder_idx, From 5d84edd4306600a844010dfb4356e0e8e1febc31 Mon Sep 17 00:00:00 2001 From: Young-Flash <871946895@qq.com> Date: Tue, 28 Nov 2023 21:15:45 +0800 Subject: [PATCH 49/80] feat: add trait_impl_reduntant_assoc_item diagnostic --- crates/hir/src/diagnostics.rs | 8 +++ crates/hir/src/lib.rs | 19 +++++++ .../trait_impl_reduntant_assoc_item.rs | 56 +++++++++++++++++++ crates/ide-diagnostics/src/lib.rs | 2 + 4 files changed, 85 insertions(+) create mode 100644 crates/ide-diagnostics/src/handlers/trait_impl_reduntant_assoc_item.rs diff --git a/crates/hir/src/diagnostics.rs b/crates/hir/src/diagnostics.rs index cf9a2b73d9b1f..74c1b97a2e37a 100644 --- a/crates/hir/src/diagnostics.rs +++ b/crates/hir/src/diagnostics.rs @@ -55,6 +55,7 @@ diagnostics![ ReplaceFilterMapNextWithFindMap, TraitImplIncorrectSafety, TraitImplMissingAssocItems, + TraitImplReduntantAssocItems, TraitImplOrphan, TypedHole, TypeMismatch, @@ -310,3 +311,10 @@ pub struct TraitImplMissingAssocItems { pub impl_: AstPtr, pub missing: Vec<(Name, AssocItem)>, } + +#[derive(Debug, PartialEq, Eq)] +pub struct TraitImplReduntantAssocItems { + pub file_id: HirFileId, + pub impl_: AstPtr, + pub reduntant: Vec<(Name, AssocItem)>, +} \ No newline at end of file diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs index 
4a0c384e8a305..5e6f3c7a99fdc 100644 --- a/crates/hir/src/lib.rs +++ b/crates/hir/src/lib.rs @@ -693,6 +693,25 @@ impl Module { }, )); + let reduntant: Vec<_> = impl_assoc_items_scratch.iter() + .filter(|(id, name)| { + !required_items.clone().any(|(impl_name, impl_item)| { + discriminant(impl_item) == discriminant(id) && impl_name == name + }) + }) + .map(|(item, name)| (name.clone(), AssocItem::from(*item))) + .collect(); + if !reduntant.is_empty() { + acc.push( + TraitImplReduntantAssocItems { + impl_: ast_id_map.get(node.ast_id()), + file_id, + reduntant, + } + .into(), + ) + } + let missing: Vec<_> = required_items .filter(|(name, id)| { !impl_assoc_items_scratch.iter().any(|(impl_item, impl_name)| { diff --git a/crates/ide-diagnostics/src/handlers/trait_impl_reduntant_assoc_item.rs b/crates/ide-diagnostics/src/handlers/trait_impl_reduntant_assoc_item.rs new file mode 100644 index 0000000000000..446ce7d9fe18f --- /dev/null +++ b/crates/ide-diagnostics/src/handlers/trait_impl_reduntant_assoc_item.rs @@ -0,0 +1,56 @@ +use hir::InFile; +use itertools::Itertools; +use syntax::{ast, AstNode}; + +use crate::{adjusted_display_range, Diagnostic, DiagnosticCode, DiagnosticsContext}; + +// Diagnostic: trait-impl-reduntant-assoc_item +// +// Diagnoses reduntant trait items in a trait impl. +pub(crate) fn trait_impl_reduntant_assoc_item( + ctx: &DiagnosticsContext<'_>, + d: &hir::TraitImplReduntantAssocItems, +) -> Diagnostic { + let reduntant = d.reduntant.iter().format_with(", ", |(name, item), f| { + f(&match *item { + hir::AssocItem::Function(_) => "`fn ", + hir::AssocItem::Const(_) => "`const ", + hir::AssocItem::TypeAlias(_) => "`type ", + })?; + f(&name.display(ctx.sema.db))?; + f(&"`") + }); + Diagnostic::new( + DiagnosticCode::RustcHardError("E0407"), + format!("{reduntant} is not a member of trait"), + adjusted_display_range::( + ctx, + InFile { file_id: d.file_id, value: d.impl_.syntax_node_ptr() }, + &|impl_| impl_.trait_().map(|t| t.syntax().text_range()), + ), + ) +} + +#[cfg(test)] +mod tests { + use crate::tests::check_diagnostics; + + #[test] + fn trait_with_default_value() { + check_diagnostics( + r#" +trait Marker { + fn boo(); +} +struct Foo; +impl Marker for Foo { + //^^^^^^ error: `type T`, `const FLAG`, `fn bar` is not a member of trait + type T = i32; + const FLAG: bool = false; + fn bar() {} + fn boo() {} +} + "#, + ) + } +} diff --git a/crates/ide-diagnostics/src/lib.rs b/crates/ide-diagnostics/src/lib.rs index 35272e8726458..c39e572b42d40 100644 --- a/crates/ide-diagnostics/src/lib.rs +++ b/crates/ide-diagnostics/src/lib.rs @@ -47,6 +47,7 @@ mod handlers { pub(crate) mod trait_impl_orphan; pub(crate) mod trait_impl_incorrect_safety; pub(crate) mod trait_impl_missing_assoc_item; + pub(crate) mod trait_impl_reduntant_assoc_item; pub(crate) mod typed_hole; pub(crate) mod type_mismatch; pub(crate) mod unimplemented_builtin_macro; @@ -364,6 +365,7 @@ pub fn diagnostics( AnyDiagnostic::ReplaceFilterMapNextWithFindMap(d) => handlers::replace_filter_map_next_with_find_map::replace_filter_map_next_with_find_map(&ctx, &d), AnyDiagnostic::TraitImplIncorrectSafety(d) => handlers::trait_impl_incorrect_safety::trait_impl_incorrect_safety(&ctx, &d), AnyDiagnostic::TraitImplMissingAssocItems(d) => handlers::trait_impl_missing_assoc_item::trait_impl_missing_assoc_item(&ctx, &d), + AnyDiagnostic::TraitImplReduntantAssocItems(d) => handlers::trait_impl_reduntant_assoc_item::trait_impl_reduntant_assoc_item(&ctx, &d), AnyDiagnostic::TraitImplOrphan(d) => 
handlers::trait_impl_orphan::trait_impl_orphan(&ctx, &d), AnyDiagnostic::TypedHole(d) => handlers::typed_hole::typed_hole(&ctx, &d), AnyDiagnostic::TypeMismatch(d) => handlers::type_mismatch::type_mismatch(&ctx, &d), From 861e47431b7d3f4d1a1d7f07f026a87917384a9f Mon Sep 17 00:00:00 2001 From: Young-Flash <871946895@qq.com> Date: Fri, 1 Dec 2023 19:09:42 +0800 Subject: [PATCH 50/80] update: make each trait_impl_reduntant_assoc_item into individual diagnostic --- crates/hir/src/diagnostics.rs | 12 ++-- crates/hir/src/lib.rs | 22 +++--- .../trait_impl_redundant_assoc_item.rs | 72 +++++++++++++++++++ .../trait_impl_reduntant_assoc_item.rs | 56 --------------- crates/ide-diagnostics/src/lib.rs | 4 +- 5 files changed, 91 insertions(+), 75 deletions(-) create mode 100644 crates/ide-diagnostics/src/handlers/trait_impl_redundant_assoc_item.rs delete mode 100644 crates/ide-diagnostics/src/handlers/trait_impl_reduntant_assoc_item.rs diff --git a/crates/hir/src/diagnostics.rs b/crates/hir/src/diagnostics.rs index 74c1b97a2e37a..52c1c27a7fa93 100644 --- a/crates/hir/src/diagnostics.rs +++ b/crates/hir/src/diagnostics.rs @@ -12,7 +12,7 @@ use hir_def::path::ModPath; use hir_expand::{name::Name, HirFileId, InFile}; use syntax::{ast, AstPtr, SyntaxError, SyntaxNodePtr, TextRange}; -use crate::{AssocItem, Field, Local, MacroKind, Type}; +use crate::{AssocItem, Field, Local, MacroKind, Trait, Type}; macro_rules! diagnostics { ($($diag:ident,)*) => { @@ -55,7 +55,7 @@ diagnostics![ ReplaceFilterMapNextWithFindMap, TraitImplIncorrectSafety, TraitImplMissingAssocItems, - TraitImplReduntantAssocItems, + TraitImplRedundantAssocItems, TraitImplOrphan, TypedHole, TypeMismatch, @@ -313,8 +313,8 @@ pub struct TraitImplMissingAssocItems { } #[derive(Debug, PartialEq, Eq)] -pub struct TraitImplReduntantAssocItems { +pub struct TraitImplRedundantAssocItems { pub file_id: HirFileId, - pub impl_: AstPtr, - pub reduntant: Vec<(Name, AssocItem)>, -} \ No newline at end of file + pub trait_: Trait, + pub assoc_item: (Name, AssocItem), +} diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs index 5e6f3c7a99fdc..4db1a02c0cd2a 100644 --- a/crates/hir/src/lib.rs +++ b/crates/hir/src/lib.rs @@ -693,20 +693,20 @@ impl Module { }, )); - let reduntant: Vec<_> = impl_assoc_items_scratch.iter() - .filter(|(id, name)| { - !required_items.clone().any(|(impl_name, impl_item)| { - discriminant(impl_item) == discriminant(id) && impl_name == name + let redundant = impl_assoc_items_scratch + .iter() + .filter(|(id, name)| { + !items.iter().any(|(impl_name, impl_item)| { + discriminant(impl_item) == discriminant(id) && impl_name == name + }) }) - }) - .map(|(item, name)| (name.clone(), AssocItem::from(*item))) - .collect(); - if !reduntant.is_empty() { + .map(|(item, name)| (name.clone(), AssocItem::from(*item))); + for (name, assoc_item) in redundant { acc.push( - TraitImplReduntantAssocItems { - impl_: ast_id_map.get(node.ast_id()), + TraitImplRedundantAssocItems { + trait_, file_id, - reduntant, + assoc_item: (name, assoc_item), } .into(), ) diff --git a/crates/ide-diagnostics/src/handlers/trait_impl_redundant_assoc_item.rs b/crates/ide-diagnostics/src/handlers/trait_impl_redundant_assoc_item.rs new file mode 100644 index 0000000000000..6aded11382cbd --- /dev/null +++ b/crates/ide-diagnostics/src/handlers/trait_impl_redundant_assoc_item.rs @@ -0,0 +1,72 @@ +use hir::{db::ExpandDatabase, Const, Function, HasSource, TypeAlias}; + +use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext}; + +// Diagnostic: 
trait-impl-redundant-assoc_item +// +// Diagnoses redundant trait items in a trait impl. +pub(crate) fn trait_impl_redundant_assoc_item( + ctx: &DiagnosticsContext<'_>, + d: &hir::TraitImplRedundantAssocItems, +) -> Diagnostic { + let name = d.assoc_item.0.clone(); + let assoc_item = d.assoc_item.1; + let db = ctx.sema.db; + + let range = db.parse_or_expand(d.file_id).text_range(); + let trait_name = d.trait_.name(db).to_smol_str(); + + let (redundant_item_name, diagnostic_range) = match assoc_item { + hir::AssocItem::Function(id) => ( + format!("`fn {}`", name.display(db)), + Function::from(id).source(db).map(|it| it.syntax().value.text_range()).unwrap_or(range), + ), + hir::AssocItem::Const(id) => ( + format!("`const {}`", name.display(db)), + Const::from(id).source(db).map(|it| it.syntax().value.text_range()).unwrap_or(range), + ), + hir::AssocItem::TypeAlias(id) => ( + format!("`type {}`", name.display(db)), + TypeAlias::from(id) + .source(db) + .map(|it| it.syntax().value.text_range()) + .unwrap_or(range), + ), + }; + + Diagnostic::new( + DiagnosticCode::RustcHardError("E0407"), + format!("{redundant_item_name} is not a member of trait `{trait_name}`"), + diagnostic_range, + ) +} + +#[cfg(test)] +mod tests { + use crate::tests::check_diagnostics; + + #[test] + fn trait_with_default_value() { + check_diagnostics( + r#" +trait Marker { + const FLAG: bool = false; + fn boo(); + fn foo () {} +} +struct Foo; +impl Marker for Foo { + type T = i32; + //^^^^^^^^^^^^^ error: `type T` is not a member of trait `Marker` + + const FLAG: bool = true; + + fn bar() {} + //^^^^^^^^^^^ error: `fn bar` is not a member of trait `Marker` + + fn boo() {} +} + "#, + ) + } +} diff --git a/crates/ide-diagnostics/src/handlers/trait_impl_reduntant_assoc_item.rs b/crates/ide-diagnostics/src/handlers/trait_impl_reduntant_assoc_item.rs deleted file mode 100644 index 446ce7d9fe18f..0000000000000 --- a/crates/ide-diagnostics/src/handlers/trait_impl_reduntant_assoc_item.rs +++ /dev/null @@ -1,56 +0,0 @@ -use hir::InFile; -use itertools::Itertools; -use syntax::{ast, AstNode}; - -use crate::{adjusted_display_range, Diagnostic, DiagnosticCode, DiagnosticsContext}; - -// Diagnostic: trait-impl-reduntant-assoc_item -// -// Diagnoses reduntant trait items in a trait impl. 
-pub(crate) fn trait_impl_reduntant_assoc_item( - ctx: &DiagnosticsContext<'_>, - d: &hir::TraitImplReduntantAssocItems, -) -> Diagnostic { - let reduntant = d.reduntant.iter().format_with(", ", |(name, item), f| { - f(&match *item { - hir::AssocItem::Function(_) => "`fn ", - hir::AssocItem::Const(_) => "`const ", - hir::AssocItem::TypeAlias(_) => "`type ", - })?; - f(&name.display(ctx.sema.db))?; - f(&"`") - }); - Diagnostic::new( - DiagnosticCode::RustcHardError("E0407"), - format!("{reduntant} is not a member of trait"), - adjusted_display_range::( - ctx, - InFile { file_id: d.file_id, value: d.impl_.syntax_node_ptr() }, - &|impl_| impl_.trait_().map(|t| t.syntax().text_range()), - ), - ) -} - -#[cfg(test)] -mod tests { - use crate::tests::check_diagnostics; - - #[test] - fn trait_with_default_value() { - check_diagnostics( - r#" -trait Marker { - fn boo(); -} -struct Foo; -impl Marker for Foo { - //^^^^^^ error: `type T`, `const FLAG`, `fn bar` is not a member of trait - type T = i32; - const FLAG: bool = false; - fn bar() {} - fn boo() {} -} - "#, - ) - } -} diff --git a/crates/ide-diagnostics/src/lib.rs b/crates/ide-diagnostics/src/lib.rs index c39e572b42d40..6cfd5f1832084 100644 --- a/crates/ide-diagnostics/src/lib.rs +++ b/crates/ide-diagnostics/src/lib.rs @@ -47,7 +47,7 @@ mod handlers { pub(crate) mod trait_impl_orphan; pub(crate) mod trait_impl_incorrect_safety; pub(crate) mod trait_impl_missing_assoc_item; - pub(crate) mod trait_impl_reduntant_assoc_item; + pub(crate) mod trait_impl_redundant_assoc_item; pub(crate) mod typed_hole; pub(crate) mod type_mismatch; pub(crate) mod unimplemented_builtin_macro; @@ -365,7 +365,7 @@ pub fn diagnostics( AnyDiagnostic::ReplaceFilterMapNextWithFindMap(d) => handlers::replace_filter_map_next_with_find_map::replace_filter_map_next_with_find_map(&ctx, &d), AnyDiagnostic::TraitImplIncorrectSafety(d) => handlers::trait_impl_incorrect_safety::trait_impl_incorrect_safety(&ctx, &d), AnyDiagnostic::TraitImplMissingAssocItems(d) => handlers::trait_impl_missing_assoc_item::trait_impl_missing_assoc_item(&ctx, &d), - AnyDiagnostic::TraitImplReduntantAssocItems(d) => handlers::trait_impl_reduntant_assoc_item::trait_impl_reduntant_assoc_item(&ctx, &d), + AnyDiagnostic::TraitImplRedundantAssocItems(d) => handlers::trait_impl_redundant_assoc_item::trait_impl_redundant_assoc_item(&ctx, &d), AnyDiagnostic::TraitImplOrphan(d) => handlers::trait_impl_orphan::trait_impl_orphan(&ctx, &d), AnyDiagnostic::TypedHole(d) => handlers::typed_hole::typed_hole(&ctx, &d), AnyDiagnostic::TypeMismatch(d) => handlers::type_mismatch::type_mismatch(&ctx, &d), From 80dc20f7d8b42e16fc68385ba3fab52402893ddf Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Wed, 6 Dec 2023 17:39:41 +0100 Subject: [PATCH 51/80] fix: Don't print proc-macro panic backtraces in the logs --- crates/hir-expand/src/builtin_fn_macro.rs | 2 + crates/hir-expand/src/lib.rs | 5 ++ crates/hir-expand/src/proc_macro.rs | 2 +- crates/ide/src/navigation_target.rs | 2 - crates/ide/src/syntax_highlighting.rs | 8 +--- .../test_data/highlight_macros.html | 4 +- .../test_data/highlight_strings.html | 2 +- crates/ide/src/syntax_highlighting/tests.rs | 6 ++- crates/proc-macro-srv/src/dylib.rs | 2 +- crates/proc-macro-srv/src/lib.rs | 4 +- crates/proc-macro-srv/src/proc_macros.rs | 46 +++++++++---------- 11 files changed, 41 insertions(+), 42 deletions(-) diff --git a/crates/hir-expand/src/builtin_fn_macro.rs b/crates/hir-expand/src/builtin_fn_macro.rs index 4b2f27bd46506..903c21c84eec9 100644 --- 
a/crates/hir-expand/src/builtin_fn_macro.rs +++ b/crates/hir-expand/src/builtin_fn_macro.rs @@ -152,6 +152,8 @@ fn line_expand( span: SpanData, ) -> ExpandResult { // dummy implementation for type-checking purposes + // Note that `line!` and `column!` will never be implemented properly, as they are by definition + // not incremental ExpandResult::ok(tt::Subtree { delimiter: tt::Delimiter::dummy_invisible(), token_trees: vec![tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal { diff --git a/crates/hir-expand/src/lib.rs b/crates/hir-expand/src/lib.rs index fe336aa1421e2..a159cf92a7ac3 100644 --- a/crates/hir-expand/src/lib.rs +++ b/crates/hir-expand/src/lib.rs @@ -78,6 +78,7 @@ pub enum ExpandError { Mbe(mbe::ExpandError), RecursionOverflowPoisoned, Other(Box>), + ProcMacroPanic(Box>), } impl ExpandError { @@ -100,6 +101,10 @@ impl fmt::Display for ExpandError { ExpandError::RecursionOverflowPoisoned => { f.write_str("overflow expanding the original macro") } + ExpandError::ProcMacroPanic(it) => { + f.write_str("proc-macro panicked: ")?; + f.write_str(it) + } ExpandError::Other(it) => f.write_str(it), } } diff --git a/crates/hir-expand/src/proc_macro.rs b/crates/hir-expand/src/proc_macro.rs index ccae4c288e65c..de577796831fd 100644 --- a/crates/hir-expand/src/proc_macro.rs +++ b/crates/hir-expand/src/proc_macro.rs @@ -85,7 +85,7 @@ impl ProcMacroExpander { ProcMacroExpansionError::System(text) | ProcMacroExpansionError::Panic(text) => ExpandResult::new( tt::Subtree::empty(tt::DelimSpan { open: call_site, close: call_site }), - ExpandError::other(text), + ExpandError::ProcMacroPanic(Box::new(text.into_boxed_str())), ), }, } diff --git a/crates/ide/src/navigation_target.rs b/crates/ide/src/navigation_target.rs index 31f4aad41e521..6cb7d7724d5f7 100644 --- a/crates/ide/src/navigation_target.rs +++ b/crates/ide/src/navigation_target.rs @@ -736,8 +736,6 @@ fn orig_range_with_focus( .definition_range(db) }; - // FIXME What about include!d things - let value_range = InFile::new(hir_file, value).original_file_range_opt(db); let ((call_site_range, call_site_focus), def_site) = match InFile::new(hir_file, name.syntax()).original_file_range_opt(db) { diff --git a/crates/ide/src/syntax_highlighting.rs b/crates/ide/src/syntax_highlighting.rs index 366a3c969f933..307812156e92b 100644 --- a/crates/ide/src/syntax_highlighting.rs +++ b/crates/ide/src/syntax_highlighting.rs @@ -404,13 +404,7 @@ fn traverse( }) .unwrap() } else { - sema.descend_into_macros_single( - match attr_or_derive_item { - Some(AttrOrDerive::Attr(_)) => DescendPreference::SameKind, - Some(AttrOrDerive::Derive(_)) | None => DescendPreference::None, - }, - token, - ) + sema.descend_into_macros_single(DescendPreference::SameKind, token) }; match token.parent().and_then(ast::NameLike::cast) { // Remap the token into the wrapping single token nodes diff --git a/crates/ide/src/syntax_highlighting/test_data/highlight_macros.html b/crates/ide/src/syntax_highlighting/test_data/highlight_macros.html index b8d38a60fc016..e8b3a38c9e0f4 100644 --- a/crates/ide/src/syntax_highlighting/test_data/highlight_macros.html +++ b/crates/ide/src/syntax_highlighting/test_data/highlight_macros.html @@ -43,7 +43,9 @@ .invalid_escape_sequence { color: #FC5555; text-decoration: wavy underline; } .unresolved_reference { color: #FC5555; text-decoration: wavy underline; } -
proc_macros::mirror! {
+use proc_macros::{mirror, identity, DeriveIdentity};
+
+mirror! {
     {
         ,i32 :x pub
         ,i32 :y pub
diff --git a/crates/ide/src/syntax_highlighting/test_data/highlight_strings.html b/crates/ide/src/syntax_highlighting/test_data/highlight_strings.html
index 75cb6223e0e62..84a823363f68f 100644
--- a/crates/ide/src/syntax_highlighting/test_data/highlight_strings.html
+++ b/crates/ide/src/syntax_highlighting/test_data/highlight_strings.html
@@ -118,7 +118,7 @@
     println!("Hello {:+}!", 5);
     println!("{:#x}!", 27);
     println!("Hello {:05}!", 5);
-    println!("Hello {:05}!", -5);
+    println!("Hello {:05}!", -5);
     println!("{:#010x}!", 27);
     println!("Hello {0} is {1:.5}", "x", 0.01);
     println!("Hello {1} is {2:.0$}", 5, "x", 0.01);
diff --git a/crates/ide/src/syntax_highlighting/tests.rs b/crates/ide/src/syntax_highlighting/tests.rs
index fcfd3c92571b5..afb6c555b4afd 100644
--- a/crates/ide/src/syntax_highlighting/tests.rs
+++ b/crates/ide/src/syntax_highlighting/tests.rs
@@ -47,10 +47,12 @@ struct Foo;
 fn macros() {
     check_highlighting(
         r#"
-//- proc_macros: mirror
+//- proc_macros: mirror, identity, derive_identity
 //- minicore: fmt, include, concat
 //- /lib.rs crate:lib
-proc_macros::mirror! {
+use proc_macros::{mirror, identity, DeriveIdentity};
+
+mirror! {
     {
         ,i32 :x pub
         ,i32 :y pub
diff --git a/crates/proc-macro-srv/src/dylib.rs b/crates/proc-macro-srv/src/dylib.rs
index 80bce3af1a006..f20e6832f6e98 100644
--- a/crates/proc-macro-srv/src/dylib.rs
+++ b/crates/proc-macro-srv/src/dylib.rs
@@ -160,7 +160,7 @@ impl Expander {
             .inner
             .proc_macros
             .expand(macro_name, macro_body, attributes, def_site, call_site, mixed_site);
-        result.map_err(|e| e.as_str().unwrap_or_else(|| "<unknown error>".to_string()))
+        result.map_err(|e| e.into_string().unwrap_or_default())
     }
 
     pub fn list_macros(&self) -> Vec<(String, ProcMacroKind)> {
diff --git a/crates/proc-macro-srv/src/lib.rs b/crates/proc-macro-srv/src/lib.rs
index 790e7936cdc52..5b03813b19a84 100644
--- a/crates/proc-macro-srv/src/lib.rs
+++ b/crates/proc-macro-srv/src/lib.rs
@@ -160,8 +160,8 @@ pub struct PanicMessage {
 }
 
 impl PanicMessage {
-    pub fn as_str(&self) -> Option<String> {
-        self.message.clone()
+    pub fn into_string(self) -> Option<String> {
+        self.message
     }
 }
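
The `as_str` -> `into_string` rename above is an ownership change, not just a
name change: the borrowing accessor had to clone the message, while the
consuming one moves it out. A minimal sketch of the difference, using a
hypothetical stand-in for the crate's `PanicMessage` (not the real type):

    struct PanicMessage {
        message: Option<String>,
    }

    impl PanicMessage {
        // Borrowing accessor: cloning is the only way to hand out an owned String.
        #[allow(dead_code)]
        fn as_str(&self) -> Option<String> {
            self.message.clone()
        }

        // Consuming accessor: moves the String out with no clone.
        fn into_string(self) -> Option<String> {
            self.message
        }
    }

    fn main() {
        let p = PanicMessage { message: Some("boom".to_owned()) };
        // Shape of the dylib.rs call site: empty string as the fallback.
        assert_eq!(p.into_string().unwrap_or_default(), "boom");
    }
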
 
diff --git a/crates/proc-macro-srv/src/proc_macros.rs b/crates/proc-macro-srv/src/proc_macros.rs
index 4f87fa281b77e..716b85d096d07 100644
--- a/crates/proc-macro-srv/src/proc_macros.rs
+++ b/crates/proc-macro-srv/src/proc_macros.rs
@@ -1,16 +1,17 @@
 //! Proc macro ABI
 
 use libloading::Library;
+use proc_macro::bridge;
 use proc_macro_api::{msg::TokenId, ProcMacroKind, RustCInfo};
 
 use crate::{dylib::LoadProcMacroDylibError, server::SYMBOL_INTERNER, tt};
 
 pub(crate) struct ProcMacros {
-    exported_macros: Vec<proc_macro::bridge::client::ProcMacro>,
+    exported_macros: Vec<bridge::client::ProcMacro>,
 }
 
-impl From<proc_macro::bridge::PanicMessage> for crate::PanicMessage {
-    fn from(p: proc_macro::bridge::PanicMessage) -> Self {
+impl From<bridge::PanicMessage> for crate::PanicMessage {
+    fn from(p: bridge::PanicMessage) -> Self {
         Self { message: p.as_str().map(|s| s.to_string()) }
     }
 }
@@ -31,9 +32,8 @@ impl ProcMacros {
         info: RustCInfo,
     ) -> Result<Self, LoadProcMacroDylibError> {
         if info.version_string == crate::RUSTC_VERSION_STRING {
-            let macros = unsafe {
-                lib.get::<&&[proc_macro::bridge::client::ProcMacro]>(symbol_name.as_bytes())
-            }?;
+            let macros =
+                unsafe { lib.get::<&&[bridge::client::ProcMacro]>(symbol_name.as_bytes()) }?;
 
             return Ok(Self { exported_macros: macros.to_vec() });
         }
@@ -57,11 +57,11 @@ impl ProcMacros {
 
         for proc_macro in &self.exported_macros {
             match proc_macro {
-                proc_macro::bridge::client::ProcMacro::CustomDerive {
-                    trait_name, client, ..
-                } if *trait_name == macro_name => {
+                bridge::client::ProcMacro::CustomDerive { trait_name, client, .. }
+                    if *trait_name == macro_name =>
+                {
                     let res = client.run(
-                        &proc_macro::bridge::server::SameThread,
+                        &bridge::server::SameThread,
                         crate::server::RustAnalyzer {
                             interner: &SYMBOL_INTERNER,
                             call_site,
@@ -69,17 +69,15 @@ impl ProcMacros {
                             mixed_site,
                         },
                         parsed_body,
-                        true,
+                        false,
                     );
                     return res
                         .map(|it| it.into_subtree(call_site))
                         .map_err(crate::PanicMessage::from);
                 }
-                proc_macro::bridge::client::ProcMacro::Bang { name, client }
-                    if *name == macro_name =>
-                {
+                bridge::client::ProcMacro::Bang { name, client } if *name == macro_name => {
                     let res = client.run(
-                        &proc_macro::bridge::server::SameThread,
+                        &bridge::server::SameThread,
                         crate::server::RustAnalyzer {
                             interner: &SYMBOL_INTERNER,
                             call_site,
@@ -87,17 +85,15 @@ impl ProcMacros {
                             mixed_site,
                         },
                         parsed_body,
-                        true,
+                        false,
                     );
                     return res
                         .map(|it| it.into_subtree(call_site))
                         .map_err(crate::PanicMessage::from);
                 }
-                proc_macro::bridge::client::ProcMacro::Attr { name, client }
-                    if *name == macro_name =>
-                {
+                bridge::client::ProcMacro::Attr { name, client } if *name == macro_name => {
                     let res = client.run(
-                        &proc_macro::bridge::server::SameThread,
+                        &bridge::server::SameThread,
                         crate::server::RustAnalyzer {
                             interner: &SYMBOL_INTERNER,
 
@@ -107,7 +103,7 @@ impl ProcMacros {
                         },
                         parsed_attributes,
                         parsed_body,
-                        true,
+                        false,
                     );
                     return res
                         .map(|it| it.into_subtree(call_site))
@@ -117,20 +113,20 @@ impl ProcMacros {
             }
         }
 
-        Err(proc_macro::bridge::PanicMessage::String("Nothing to expand".to_string()).into())
+        Err(bridge::PanicMessage::String("Nothing to expand".to_string()).into())
     }
 
     pub(crate) fn list_macros(&self) -> Vec<(String, ProcMacroKind)> {
         self.exported_macros
             .iter()
             .map(|proc_macro| match proc_macro {
-                proc_macro::bridge::client::ProcMacro::CustomDerive { trait_name, .. } => {
+                bridge::client::ProcMacro::CustomDerive { trait_name, .. } => {
                     (trait_name.to_string(), ProcMacroKind::CustomDerive)
                 }
-                proc_macro::bridge::client::ProcMacro::Bang { name, .. } => {
+                bridge::client::ProcMacro::Bang { name, .. } => {
                     (name.to_string(), ProcMacroKind::FuncLike)
                 }
-                proc_macro::bridge::client::ProcMacro::Attr { name, .. } => {
+                bridge::client::ProcMacro::Attr { name, .. } => {
                     (name.to_string(), ProcMacroKind::Attr)
                 }
             })

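A note on the `as_str` -> `into_string` rename above: taking `self` by value lets the caller move the message out rather than clone it, which is why the dylib.rs call site can switch to `unwrap_or_default()`. A minimal, self-contained sketch of the pattern (a simplified stand-in for the crate's `PanicMessage`, not the real type):

    struct PanicMessage {
        message: Option<String>,
    }

    impl PanicMessage {
        // Consuming `self` hands back the owned message without a clone.
        fn into_string(self) -> Option<String> {
            self.message
        }
    }

    fn main() {
        let err = PanicMessage { message: Some("proc macro panicked".to_owned()) };
        // Mirrors the dylib.rs call site: empty string when there is no message.
        assert_eq!(err.into_string().unwrap_or_default(), "proc macro panicked");
    }
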
From c17dcc8d9058ef75c7c52fc474c9d15d2006340d Mon Sep 17 00:00:00 2001
From: David Barsky 
Date: Tue, 28 Nov 2023 10:36:01 -0500
Subject: [PATCH 52/80] internal: switch to `Arc::from_iter`

---
 crates/hir-expand/src/attrs.rs               | 123 ++++++++-----------
 crates/hir-ty/src/lib.rs                     |   6 +-
 crates/hir-ty/src/lower.rs                   |  90 ++++++--------
 crates/hir-ty/src/method_resolution.rs       |   9 +-
 crates/rust-analyzer/src/global_state.rs     |   8 +-
 crates/rust-analyzer/src/handlers/request.rs |   6 +-
 crates/rust-analyzer/src/reload.rs           |  38 +++---
 7 files changed, 117 insertions(+), 163 deletions(-)

diff --git a/crates/hir-expand/src/attrs.rs b/crates/hir-expand/src/attrs.rs
index edaf2f06a4e86..b8fc30c91189a 100644
--- a/crates/hir-expand/src/attrs.rs
+++ b/crates/hir-expand/src/attrs.rs
@@ -44,21 +44,18 @@ impl RawAttrs {
         owner: &dyn ast::HasAttrs,
         span_map: SpanMapRef<'_>,
     ) -> Self {
-        let entries = collect_attrs(owner)
-            .filter_map(|(id, attr)| match attr {
-                Either::Left(attr) => {
-                    attr.meta().and_then(|meta| Attr::from_src(db, meta, span_map, id))
-                }
-                Either::Right(comment) => comment.doc_comment().map(|doc| Attr {
-                    id,
-                    input: Some(Interned::new(AttrInput::Literal(SmolStr::new(doc)))),
-                    path: Interned::new(ModPath::from(crate::name!(doc))),
-                    ctxt: span_map.span_for_range(comment.syntax().text_range()).ctx,
-                }),
-            })
-            .collect::<Vec<_>>();
-        // FIXME: use `Arc::from_iter` when it becomes available
-        let entries: Arc<[Attr]> = Arc::from(entries);
+        let entries = collect_attrs(owner).filter_map(|(id, attr)| match attr {
+            Either::Left(attr) => {
+                attr.meta().and_then(|meta| Attr::from_src(db, meta, span_map, id))
+            }
+            Either::Right(comment) => comment.doc_comment().map(|doc| Attr {
+                id,
+                input: Some(Interned::new(AttrInput::Literal(SmolStr::new(doc)))),
+                path: Interned::new(ModPath::from(crate::name!(doc))),
+                ctxt: span_map.span_for_range(comment.syntax().text_range()).ctx,
+            }),
+        });
+        let entries: Arc<[Attr]> = Arc::from_iter(entries);
 
         Self { entries: if entries.is_empty() { None } else { Some(entries) } }
     }
@@ -79,19 +76,13 @@ impl RawAttrs {
             (Some(a), Some(b)) => {
                 let last_ast_index = a.last().map_or(0, |it| it.id.ast_index() + 1) as u32;
                 Self {
-                    entries: Some(Arc::from(
-                        a.iter()
-                            .cloned()
-                            .chain(b.iter().map(|it| {
-                                let mut it = it.clone();
-                                it.id.id = it.id.ast_index() as u32 + last_ast_index
-                                    | (it.id.cfg_attr_index().unwrap_or(0) as u32)
-                                        << AttrId::AST_INDEX_BITS;
-                                it
-                            }))
-                            // FIXME: use `Arc::from_iter` when it becomes available
-                            .collect::<Vec<_>>(),
-                    )),
+                    entries: Some(Arc::from_iter(a.iter().cloned().chain(b.iter().map(|it| {
+                        let mut it = it.clone();
+                        it.id.id = it.id.ast_index() as u32 + last_ast_index
+                            | (it.id.cfg_attr_index().unwrap_or(0) as u32)
+                                << AttrId::AST_INDEX_BITS;
+                        it
+                    })))),
                 }
             }
         }
@@ -108,49 +99,43 @@ impl RawAttrs {
         }
 
         let crate_graph = db.crate_graph();
-        let new_attrs = Arc::from(
-            self.iter()
-                .flat_map(|attr| -> SmallVec<[_; 1]> {
-                    let is_cfg_attr =
-                        attr.path.as_ident().map_or(false, |name| *name == crate::name![cfg_attr]);
-                    if !is_cfg_attr {
-                        return smallvec![attr.clone()];
-                    }
-
-                    let subtree = match attr.token_tree_value() {
-                        Some(it) => it,
-                        _ => return smallvec![attr.clone()],
-                    };
+        let new_attrs = Arc::from_iter(self.iter().flat_map(|attr| -> SmallVec<[_; 1]> {
+            let is_cfg_attr =
+                attr.path.as_ident().map_or(false, |name| *name == crate::name![cfg_attr]);
+            if !is_cfg_attr {
+                return smallvec![attr.clone()];
+            }
 
-                    let (cfg, parts) = match parse_cfg_attr_input(subtree) {
-                        Some(it) => it,
-                        None => return smallvec![attr.clone()],
+            let subtree = match attr.token_tree_value() {
+                Some(it) => it,
+                _ => return smallvec![attr.clone()],
+            };
+
+            let (cfg, parts) = match parse_cfg_attr_input(subtree) {
+                Some(it) => it,
+                None => return smallvec![attr.clone()],
+            };
+            let index = attr.id;
+            let attrs =
+                parts.enumerate().take(1 << AttrId::CFG_ATTR_BITS).filter_map(|(idx, attr)| {
+                    let tree = Subtree {
+                        delimiter: tt::Delimiter::dummy_invisible(),
+                        token_trees: attr.to_vec(),
                     };
-                    let index = attr.id;
-                    let attrs = parts.enumerate().take(1 << AttrId::CFG_ATTR_BITS).filter_map(
-                        |(idx, attr)| {
-                            let tree = Subtree {
-                                delimiter: tt::Delimiter::dummy_invisible(),
-                                token_trees: attr.to_vec(),
-                            };
-                            Attr::from_tt(db, &tree, index.with_cfg_attr(idx))
-                        },
-                    );
-
-                    let cfg_options = &crate_graph[krate].cfg_options;
-                    let cfg = Subtree { delimiter: subtree.delimiter, token_trees: cfg.to_vec() };
-                    let cfg = CfgExpr::parse(&cfg);
-                    if cfg_options.check(&cfg) == Some(false) {
-                        smallvec![]
-                    } else {
-                        cov_mark::hit!(cfg_attr_active);
-
-                        attrs.collect()
-                    }
-                })
-                // FIXME: use `Arc::from_iter` when it becomes available
-                .collect::<Vec<_>>(),
-        );
+                    Attr::from_tt(db, &tree, index.with_cfg_attr(idx))
+                });
+
+            let cfg_options = &crate_graph[krate].cfg_options;
+            let cfg = Subtree { delimiter: subtree.delimiter, token_trees: cfg.to_vec() };
+            let cfg = CfgExpr::parse(&cfg);
+            if cfg_options.check(&cfg) == Some(false) {
+                smallvec![]
+            } else {
+                cov_mark::hit!(cfg_attr_active);
+
+                attrs.collect()
+            }
+        }));
 
         RawAttrs { entries: Some(new_attrs) }
     }
diff --git a/crates/hir-ty/src/lib.rs b/crates/hir-ty/src/lib.rs
index 33dffafa26004..5a3e423f152ae 100644
--- a/crates/hir-ty/src/lib.rs
+++ b/crates/hir-ty/src/lib.rs
@@ -322,8 +322,7 @@ impl CallableSig {
     pub fn from_fn_ptr(fn_ptr: &FnPointer) -> CallableSig {
         CallableSig {
             // FIXME: what to do about lifetime params? -> return PolyFnSig
-            // FIXME: use `Arc::from_iter` when it becomes available
-            params_and_return: Arc::from(
+            params_and_return: Arc::from_iter(
                 fn_ptr
                     .substitution
                     .clone()
@@ -332,8 +331,7 @@ impl CallableSig {
                     .0
                     .as_slice(Interner)
                     .iter()
-                    .map(|arg| arg.assert_ty_ref(Interner).clone())
-                    .collect::<Vec<_>>(),
+                    .map(|arg| arg.assert_ty_ref(Interner).clone()),
             ),
             is_varargs: fn_ptr.sig.variadic,
             safety: fn_ptr.sig.safety,
diff --git a/crates/hir-ty/src/lower.rs b/crates/hir-ty/src/lower.rs
index 5122021d6d2bd..30ebd1f92e071 100644
--- a/crates/hir-ty/src/lower.rs
+++ b/crates/hir-ty/src/lower.rs
@@ -1459,8 +1459,7 @@ pub(crate) fn generic_predicates_for_param_recover(
     _param_id: &TypeOrConstParamId,
     _assoc_name: &Option<Name>,
 ) -> Arc<[Binders<QuantifiedWhereClause>]> {
-    // FIXME: use `Arc::from_iter` when it becomes available
-    Arc::from(vec![])
+    Arc::from_iter(None)
 }
 
 pub(crate) fn trait_environment_for_body_query(
@@ -1603,44 +1602,35 @@ pub(crate) fn generic_defaults_query(
     let generic_params = generics(db.upcast(), def);
     let parent_start_idx = generic_params.len_self();
 
-    let defaults = Arc::from(
-        generic_params
-            .iter()
-            .enumerate()
-            .map(|(idx, (id, p))| {
-                match p {
-                    TypeOrConstParamData::TypeParamData(p) => {
-                        let mut ty = p
-                            .default
-                            .as_ref()
-                            .map_or(TyKind::Error.intern(Interner), |t| ctx.lower_ty(t));
-                        // Each default can only refer to previous parameters.
-                        // Type variable default referring to parameter coming
-                        // after it is forbidden (FIXME: report diagnostic)
-                        ty = fallback_bound_vars(ty, idx, parent_start_idx);
-                        crate::make_binders(db, &generic_params, ty.cast(Interner))
-                    }
-                    TypeOrConstParamData::ConstParamData(p) => {
-                        let mut val = p.default.as_ref().map_or_else(
-                            || {
-                                unknown_const_as_generic(
-                                    db.const_param_ty(ConstParamId::from_unchecked(id)),
-                                )
-                            },
-                            |c| {
-                                let c = ctx.lower_const(c, ctx.lower_ty(&p.ty));
-                                c.cast(Interner)
-                            },
-                        );
-                        // Each default can only refer to previous parameters, see above.
-                        val = fallback_bound_vars(val, idx, parent_start_idx);
-                        make_binders(db, &generic_params, val)
-                    }
-                }
-            })
-            // FIXME: use `Arc::from_iter` when it becomes available
-            .collect::<Vec<_>>(),
-    );
+    let defaults = Arc::from_iter(generic_params.iter().enumerate().map(|(idx, (id, p))| {
+        match p {
+            TypeOrConstParamData::TypeParamData(p) => {
+                let mut ty =
+                    p.default.as_ref().map_or(TyKind::Error.intern(Interner), |t| ctx.lower_ty(t));
+                // Each default can only refer to previous parameters.
+                // Type variable default referring to parameter coming
+                // after it is forbidden (FIXME: report diagnostic)
+                ty = fallback_bound_vars(ty, idx, parent_start_idx);
+                crate::make_binders(db, &generic_params, ty.cast(Interner))
+            }
+            TypeOrConstParamData::ConstParamData(p) => {
+                let mut val = p.default.as_ref().map_or_else(
+                    || {
+                        unknown_const_as_generic(
+                            db.const_param_ty(ConstParamId::from_unchecked(id)),
+                        )
+                    },
+                    |c| {
+                        let c = ctx.lower_const(c, ctx.lower_ty(&p.ty));
+                        c.cast(Interner)
+                    },
+                );
+                // Each default can only refer to previous parameters, see above.
+                val = fallback_bound_vars(val, idx, parent_start_idx);
+                make_binders(db, &generic_params, val)
+            }
+        }
+    }));
 
     defaults
 }
@@ -1653,19 +1643,13 @@ pub(crate) fn generic_defaults_recover(
     let generic_params = generics(db.upcast(), *def);
     // FIXME: this code is not covered in tests.
     // we still need one default per parameter
-    let defaults = Arc::from(
-        generic_params
-            .iter_id()
-            .map(|id| {
-                let val = match id {
-                    Either::Left(_) => TyKind::Error.intern(Interner).cast(Interner),
-                    Either::Right(id) => unknown_const_as_generic(db.const_param_ty(id)),
-                };
-                crate::make_binders(db, &generic_params, val)
-            })
-            // FIXME: use `Arc::from_iter` when it becomes available
-            .collect::<Vec<_>>(),
-    );
+    let defaults = Arc::from_iter(generic_params.iter_id().map(|id| {
+        let val = match id {
+            Either::Left(_) => TyKind::Error.intern(Interner).cast(Interner),
+            Either::Right(id) => unknown_const_as_generic(db.const_param_ty(id)),
+        };
+        crate::make_binders(db, &generic_params, val)
+    }));
 
     defaults
 }
diff --git a/crates/hir-ty/src/method_resolution.rs b/crates/hir-ty/src/method_resolution.rs
index 732643566a2d6..041d61c1b153d 100644
--- a/crates/hir-ty/src/method_resolution.rs
+++ b/crates/hir-ty/src/method_resolution.rs
@@ -168,12 +168,9 @@ impl TraitImpls {
     ) -> Arc<[Arc<TraitImpls>]> {
         let _p = profile::span("trait_impls_in_deps_query").detail(|| format!("{krate:?}"));
         let crate_graph = db.crate_graph();
-        // FIXME: use `Arc::from_iter` when it becomes available
-        Arc::from(
-            crate_graph
-                .transitive_deps(krate)
-                .map(|krate| db.trait_impls_in_crate(krate))
-                .collect::<Vec<_>>(),
+
+        Arc::from_iter(
+            crate_graph.transitive_deps(krate).map(|krate| db.trait_impls_in_crate(krate)),
         )
     }
 
diff --git a/crates/rust-analyzer/src/global_state.rs b/crates/rust-analyzer/src/global_state.rs
index c09f57252ce96..0f31fe16054a5 100644
--- a/crates/rust-analyzer/src/global_state.rs
+++ b/crates/rust-analyzer/src/global_state.rs
@@ -187,11 +187,9 @@ impl GlobalState {
             config_errors: Default::default(),
 
             proc_macro_changed: false,
-            // FIXME: use `Arc::from_iter` when it becomes available
-            proc_macro_clients: Arc::from(Vec::new()),
+            proc_macro_clients: Arc::from_iter([]),
 
-            // FIXME: use `Arc::from_iter` when it becomes available
-            flycheck: Arc::from(Vec::new()),
+            flycheck: Arc::from_iter([]),
             flycheck_sender,
             flycheck_receiver,
             last_flycheck_error: None,
@@ -202,7 +200,7 @@ impl GlobalState {
             vfs_progress_n_total: 0,
             vfs_progress_n_done: 0,
 
-            workspaces: Arc::new(Vec::new()),
+            workspaces: Arc::from(Vec::new()),
             crate_graph_file_dependencies: FxHashSet::default(),
             fetch_workspaces_queue: OpQueue::default(),
             fetch_build_data_queue: OpQueue::default(),
diff --git a/crates/rust-analyzer/src/handlers/request.rs b/crates/rust-analyzer/src/handlers/request.rs
index 49c88702faad0..57955ebf897e3 100644
--- a/crates/rust-analyzer/src/handlers/request.rs
+++ b/crates/rust-analyzer/src/handlers/request.rs
@@ -51,8 +51,7 @@ use crate::{
 };
 
 pub(crate) fn handle_workspace_reload(state: &mut GlobalState, _: ()) -> anyhow::Result<()> {
-    // FIXME: use `Arc::from_iter` when it becomes available
-    state.proc_macro_clients = Arc::from(Vec::new());
+    state.proc_macro_clients = Arc::from_iter([]);
     state.proc_macro_changed = false;
 
     state.fetch_workspaces_queue.request_op("reload workspace request".to_string(), false);
@@ -60,8 +59,7 @@ pub(crate) fn handle_workspace_reload(state: &mut GlobalState, _: ()) -> anyhow:
 }
 
 pub(crate) fn handle_proc_macros_rebuild(state: &mut GlobalState, _: ()) -> anyhow::Result<()> {
-    // FIXME: use `Arc::from_iter` when it becomes available
-    state.proc_macro_clients = Arc::from(Vec::new());
+    state.proc_macro_clients = Arc::from_iter([]);
     state.proc_macro_changed = false;
 
     state.fetch_build_data_queue.request_op("rebuild proc macros request".to_string(), ());
diff --git a/crates/rust-analyzer/src/reload.rs b/crates/rust-analyzer/src/reload.rs
index abe2191f40024..7ab528f497511 100644
--- a/crates/rust-analyzer/src/reload.rs
+++ b/crates/rust-analyzer/src/reload.rs
@@ -437,28 +437,22 @@ impl GlobalState {
             if self.config.expand_proc_macros() {
                 tracing::info!("Spawning proc-macro servers");
 
-                // FIXME: use `Arc::from_iter` when it becomes available
-                self.proc_macro_clients = Arc::from(
-                    self.workspaces
-                        .iter()
-                        .map(|ws| {
-                            let path = match self.config.proc_macro_srv() {
-                                Some(path) => path,
-                                None => ws.find_sysroot_proc_macro_srv()?,
-                            };
-
-                            tracing::info!("Using proc-macro server at {path}");
-                            ProcMacroServer::spawn(path.clone()).map_err(|err| {
-                                tracing::error!(
-                                    "Failed to run proc-macro server from path {path}, error: {err:?}",
-                                );
-                                anyhow::format_err!(
-                                    "Failed to run proc-macro server from path {path}, error: {err:?}",
-                                )
-                            })
-                        })
-                        .collect::<Vec<_>>(),
-                )
+                self.proc_macro_clients = Arc::from_iter(self.workspaces.iter().map(|ws| {
+                    let path = match self.config.proc_macro_srv() {
+                        Some(path) => path,
+                        None => ws.find_sysroot_proc_macro_srv()?,
+                    };
+
+                    tracing::info!("Using proc-macro server at {path}");
+                    ProcMacroServer::spawn(path.clone()).map_err(|err| {
+                        tracing::error!(
+                            "Failed to run proc-macro server from path {path}, error: {err:?}",
+                        );
+                        anyhow::format_err!(
+                            "Failed to run proc-macro server from path {path}, error: {err:?}",
+                        )
+                    })
+                }))
             };
         }
 

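For context on the refactor above: `Arc<[T]>` implements `FromIterator<T>`, so `Arc::from_iter` builds the shared slice allocation directly from an iterator, removing the intermediate `Vec` that the old `Arc::from(iter.collect::<Vec<_>>())` shape (and its FIXME comments) worked around. A small illustration:

    use std::sync::Arc;

    fn main() {
        // Old shape: collect into a Vec first, then convert.
        let old: Arc<[i32]> = Arc::from((0..4).map(|i| i * 2).collect::<Vec<_>>());
        // New shape: build the Arc<[T]> straight from the iterator.
        let new: Arc<[i32]> = Arc::from_iter((0..4).map(|i| i * 2));
        assert_eq!(old, new);

        // Empty slices work the same way, as used in global_state.rs above.
        let empty: Arc<[i32]> = Arc::from_iter([]);
        assert!(empty.is_empty());
    }
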
From a84685a58d3ce833d8d2517f0b9069569edd16db Mon Sep 17 00:00:00 2001
From: Lukas Wirth 
Date: Wed, 6 Dec 2023 21:56:04 +0100
Subject: [PATCH 53/80] Publish lsp-server 0.7.5

---
 Cargo.lock                | 12 ++++++------
 lib/lsp-server/Cargo.toml |  2 +-
 2 files changed, 7 insertions(+), 7 deletions(-)

diff --git a/Cargo.lock b/Cargo.lock
index 876ba2546a38f..51db45fc9be4a 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -939,23 +939,23 @@ checksum = "b06a4cde4c0f271a446782e3eff8de789548ce57dbc8eca9292c27f4a42004b4"
 [[package]]
 name = "lsp-server"
 version = "0.7.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b52dccdf3302eefab8c8a1273047f0a3c3dca4b527c8458d00c09484c8371928"
 dependencies = [
  "crossbeam-channel",
- "ctrlc",
  "log",
- "lsp-types",
  "serde",
  "serde_json",
 ]
 
 [[package]]
 name = "lsp-server"
-version = "0.7.4"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b52dccdf3302eefab8c8a1273047f0a3c3dca4b527c8458d00c09484c8371928"
+version = "0.7.5"
 dependencies = [
  "crossbeam-channel",
+ "ctrlc",
  "log",
+ "lsp-types",
  "serde",
  "serde_json",
 ]
@@ -1529,7 +1529,7 @@ dependencies = [
  "ide-ssr",
  "itertools",
  "load-cargo",
- "lsp-server 0.7.4 (registry+https://github.com/rust-lang/crates.io-index)",
+ "lsp-server 0.7.4",
  "lsp-types",
  "mbe",
  "mimalloc",
diff --git a/lib/lsp-server/Cargo.toml b/lib/lsp-server/Cargo.toml
index be1573913ff24..2a70aedbe8ec1 100644
--- a/lib/lsp-server/Cargo.toml
+++ b/lib/lsp-server/Cargo.toml
@@ -1,6 +1,6 @@
 [package]
 name = "lsp-server"
-version = "0.7.4"
+version = "0.7.5"
 description = "Generic LSP server scaffold."
 license = "MIT OR Apache-2.0"
 repository = "https://github.com/rust-lang/rust-analyzer/tree/master/lib/lsp-server"

From 9a3167e65e80a26698dfc651cc0cd5c405fbd1a9 Mon Sep 17 00:00:00 2001
From: roife 
Date: Thu, 7 Dec 2023 15:30:00 +0800
Subject: [PATCH 54/80] fix: correct calculation for fields in WideChar for
 line-specific positions

---
 lib/line-index/src/lib.rs | 5 ++++-
 1 file changed, 4 insertions(+), 1 deletion(-)

diff --git a/lib/line-index/src/lib.rs b/lib/line-index/src/lib.rs
index 03371c9c87af9..58f266d67f629 100644
--- a/lib/line-index/src/lib.rs
+++ b/lib/line-index/src/lib.rs
@@ -363,7 +363,10 @@ fn analyze_source_file_generic(
             let c = src[i..].chars().next().unwrap();
             char_len = c.len_utf8();
 
-            let pos = TextSize::from(i as u32) + output_offset;
+            // The last element of `lines` represents the offset of the start of
+            // the current line. To get the offset inside the line, we subtract it.
+            let pos = TextSize::from(i as u32) + output_offset
+                - lines.last().unwrap_or(&TextSize::default());
 
             if char_len > 1 {
                 assert!((2..=4).contains(&char_len));

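The one-line fix above is easiest to see with concrete offsets: a multi-byte character's position must be recorded relative to the start of its own line, not the start of the file. A standalone sketch of the corrected bookkeeping (simplified, not the crate's internal types):

    fn main() {
        let src = "ab\ncdΔf";
        // Byte offsets at which each line starts: [0, 3].
        let line_starts: Vec<usize> =
            std::iter::once(0).chain(src.match_indices('\n').map(|(i, _)| i + 1)).collect();
        let delta_pos = src.find('Δ').unwrap(); // absolute byte offset: 5
        let line_start =
            *line_starts.iter().take_while(|&&start| start <= delta_pos).last().unwrap();
        // The fix: subtract the current line's start to get the in-line offset.
        assert_eq!(delta_pos - line_start, 2);
    }
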
From a011b6c84c19357d21cd418f011f4d62b320abf7 Mon Sep 17 00:00:00 2001
From: roife 
Date: Thu, 7 Dec 2023 15:31:15 +0800
Subject: [PATCH 55/80] fix: correct existing tests for WideChar in lib
 'line-index' and add more tests

---
 lib/line-index/src/tests.rs | 28 ++++++++++++++++++++++++++--
 1 file changed, 26 insertions(+), 2 deletions(-)

diff --git a/lib/line-index/src/tests.rs b/lib/line-index/src/tests.rs
index 8f3762d191064..981008e346ba4 100644
--- a/lib/line-index/src/tests.rs
+++ b/lib/line-index/src/tests.rs
@@ -1,4 +1,4 @@
-use crate::{LineIndex, TextSize, WideChar};
+use crate::{LineCol, LineIndex, TextSize, WideChar, WideEncoding, WideLineCol};
 
 macro_rules! test {
     (
@@ -102,7 +102,7 @@ test!(
     case: multi_byte_with_new_lines,
     text: "01\t345\n789abcΔf01234567\u{07}9\nbcΔf",
     lines: vec![7, 27],
-    multi_byte_chars: vec![(1, (13, 15)), (2, (29, 31))],
+    multi_byte_chars: vec![(1, (6, 8)), (2, (2, 4))],
 );
 
 test!(
@@ -118,3 +118,27 @@ test!(
     lines: vec![16],
     multi_byte_chars: vec![],
 );
+
+#[test]
+fn test_try_line_col() {
+    let text = "\n\n\n\n\n宽3456";
+    assert_eq!(&text[5..8], "宽");
+    assert_eq!(&text[11..12], "6");
+    let line_index = LineIndex::new(text);
+    let before_6 = TextSize::from(11);
+    let line_col = line_index.try_line_col(before_6);
+    assert_eq!(line_col, Some(LineCol { line: 5, col: 6 }));
+}
+
+#[test]
+fn test_to_wide() {
+    let text = "\n\n\n\n\n宽3456";
+    assert_eq!(&text[5..8], "宽");
+    assert_eq!(&text[11..12], "6");
+    let line_index = LineIndex::new(text);
+    let before_6 = TextSize::from(11);
+    let line_col = line_index.try_line_col(before_6);
+    assert_eq!(line_col, Some(LineCol { line: 5, col: 6 }));
+    let wide_line_col = line_index.to_wide(WideEncoding::Utf16, line_col.unwrap());
+    assert_eq!(wide_line_col, Some(WideLineCol { line: 5, col: 4 }));
+}

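A worked check of the columns these tests expect: line 5 starts at byte offset 5 and `before_6` is byte offset 11, so the UTF-8 column is 11 - 5 = 6 ('宽' occupies bytes 5..8, then '3', '4', '5' take one byte each). In UTF-16, '宽' (U+5BBD) is a single code unit, so the same position is 1 + 3 = 4 code units into the line, matching `WideLineCol { line: 5, col: 4 }`.
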
From 45d81048c98549f44e23d18ba49bd4cae08459a3 Mon Sep 17 00:00:00 2001
From: Lukas Wirth 
Date: Thu, 7 Dec 2023 10:57:51 +0100
Subject: [PATCH 56/80] internal: Bump salsa

---
 Cargo.lock                                |  9 ++++-----
 Cargo.toml                                | 19 +++++++++++--------
 crates/base-db/Cargo.toml                 |  8 +++-----
 crates/hir-ty/src/consteval.rs            |  8 ++++----
 crates/hir-ty/src/layout.rs               |  3 ++-
 crates/hir-ty/src/layout/adt.rs           |  3 ++-
 crates/hir-ty/src/lower.rs                | 10 +++++-----
 crates/hir-ty/src/mir/lower.rs            |  4 ++--
 crates/hir-ty/src/mir/monomorphization.rs |  3 ++-
 9 files changed, 35 insertions(+), 32 deletions(-)

diff --git a/Cargo.lock b/Cargo.lock
index 876ba2546a38f..0d624c7907455 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -1569,11 +1569,10 @@ dependencies = [
 
 [[package]]
 name = "rust-analyzer-salsa"
-version = "0.17.0-pre.3"
+version = "0.17.0-pre.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4ca92b657d614d076800aa7bf5d5ba33564e71fa7f16cd79eacdfe301a50ab1c"
+checksum = "16c42b8737c320578b441a82daf7cdf8d897468de64e8a774fa54b53a50b6cc0"
 dependencies = [
- "crossbeam-utils",
  "indexmap",
  "lock_api",
  "log",
@@ -1586,9 +1585,9 @@ dependencies = [
 
 [[package]]
 name = "rust-analyzer-salsa-macros"
-version = "0.17.0-pre.3"
+version = "0.17.0-pre.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b190359266d293f2ee13eaa502a766dc8b77b63fbaa5d460d24fd0210675ceef"
+checksum = "db72b0883f3592ade2be15a10583c75e0b269ec26e1190800fda2e2ce5ae6634"
 dependencies = [
  "heck",
  "proc-macro2",
diff --git a/Cargo.toml b/Cargo.toml
index 272f456bf9f86..17810d0f299d1 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -42,7 +42,7 @@ debug = 0
 
 # ungrammar = { path = "../ungrammar" }
 
-# salsa = { path = "../salsa" }
+# rust-analyzer-salsa = { path = "../salsa" }
 
 [workspace.dependencies]
 # local crates
@@ -98,11 +98,19 @@ either = "1.9.0"
 indexmap = "2.1.0"
 itertools = "0.12.0"
 libc = "0.2.150"
+nohash-hasher = "0.2.0"
+rayon = "1.8.0"
+rust-analyzer-salsa = "0.17.0-pre.4"
+rustc-hash = "1.1.0"
+serde = { version = "1.0.192", features = ["derive"] }
+serde_json = "1.0.108"
 smallvec = { version = "1.10.0", features = [
   "const_new",
   "union",
   "const_generics",
 ] }
+smol_str = "0.2.0"
+text-size = "1.1.1"
 tracing = "0.1.40"
 tracing-tree = "0.3.0"
 tracing-subscriber = { version = "0.3.18", default-features = false, features = [
@@ -110,15 +118,10 @@ tracing-subscriber = { version = "0.3.18", default-features = false, features =
     "fmt",
     "tracing-log",
 ] }
-smol_str = "0.2.0"
-nohash-hasher = "0.2.0"
-text-size = "1.1.1"
-rayon = "1.8.0"
-serde = { version = "1.0.192", features = ["derive"] }
-serde_json = "1.0.108"
 triomphe = { version = "0.1.10", default-features = false, features = ["std"] }
+xshell = "0.2.5"
+
 # can't upgrade due to dashmap depending on 0.12.3 currently
 hashbrown = { version = "0.12.3", features = [
   "inline-more",
 ], default-features = false }
-xshell = "0.2.5"
diff --git a/crates/base-db/Cargo.toml b/crates/base-db/Cargo.toml
index 5ad88f65188e0..393ffe155ba88 100644
--- a/crates/base-db/Cargo.toml
+++ b/crates/base-db/Cargo.toml
@@ -12,12 +12,10 @@ rust-version.workspace = true
 doctest = false
 
 [dependencies]
-rust-analyzer-salsa = "0.17.0-pre.3"
-rustc-hash = "1.1.0"
-
-triomphe.workspace = true
-
 la-arena.workspace = true
+rust-analyzer-salsa.workspace = true
+rustc-hash.workspace = true
+triomphe.workspace = true
 
 # local deps
 cfg.workspace = true
diff --git a/crates/hir-ty/src/consteval.rs b/crates/hir-ty/src/consteval.rs
index 0348680e5da19..576a07d4fb69e 100644
--- a/crates/hir-ty/src/consteval.rs
+++ b/crates/hir-ty/src/consteval.rs
@@ -1,6 +1,6 @@
 //! Constant evaluation details
 
-use base_db::CrateId;
+use base_db::{salsa::Cycle, CrateId};
 use chalk_ir::{cast::Cast, BoundVar, DebruijnIndex};
 use hir_def::{
     hir::Expr,
@@ -184,7 +184,7 @@ pub fn try_const_usize(db: &dyn HirDatabase, c: &Const) -> Option<u128> {
 
 pub(crate) fn const_eval_recover(
     _: &dyn HirDatabase,
-    _: &[String],
+    _: &Cycle,
     _: &GeneralConstId,
     _: &Substitution,
     _: &Option<Arc<TraitEnvironment>>,
@@ -194,7 +194,7 @@ pub(crate) fn const_eval_recover(
 
 pub(crate) fn const_eval_static_recover(
     _: &dyn HirDatabase,
-    _: &[String],
+    _: &Cycle,
     _: &StaticId,
 ) -> Result<Const, ConstEvalError> {
     Err(ConstEvalError::MirLowerError(MirLowerError::Loop))
@@ -202,7 +202,7 @@ pub(crate) fn const_eval_static_recover(
 
 pub(crate) fn const_eval_discriminant_recover(
     _: &dyn HirDatabase,
-    _: &[String],
+    _: &Cycle,
     _: &EnumVariantId,
 ) -> Result<i128, ConstEvalError> {
     Err(ConstEvalError::MirLowerError(MirLowerError::Loop))
diff --git a/crates/hir-ty/src/layout.rs b/crates/hir-ty/src/layout.rs
index 27c7949986878..bfc4f1383ec6a 100644
--- a/crates/hir-ty/src/layout.rs
+++ b/crates/hir-ty/src/layout.rs
@@ -2,6 +2,7 @@
 
 use std::fmt;
 
+use base_db::salsa::Cycle;
 use chalk_ir::{AdtId, FloatTy, IntTy, TyKind, UintTy};
 use hir_def::{
     layout::{
@@ -431,7 +432,7 @@ pub fn layout_of_ty_query(
 
 pub fn layout_of_ty_recover(
     _: &dyn HirDatabase,
-    _: &[String],
+    _: &Cycle,
     _: &Ty,
     _: &Arc<TraitEnvironment>,
 ) -> Result<Arc<Layout>, LayoutError> {
diff --git a/crates/hir-ty/src/layout/adt.rs b/crates/hir-ty/src/layout/adt.rs
index 58a06dc643542..39788a9502993 100644
--- a/crates/hir-ty/src/layout/adt.rs
+++ b/crates/hir-ty/src/layout/adt.rs
@@ -2,6 +2,7 @@
 
 use std::{cmp, ops::Bound};
 
+use base_db::salsa::Cycle;
 use hir_def::{
     data::adt::VariantData,
     layout::{Integer, LayoutCalculator, ReprOptions, TargetDataLayout},
@@ -140,7 +141,7 @@ fn layout_scalar_valid_range(db: &dyn HirDatabase, def: AdtId) -> (Bound<u128>,
 
 pub fn layout_of_adt_recover(
     _: &dyn HirDatabase,
-    _: &[String],
+    _: &Cycle,
     _: &AdtId,
     _: &Substitution,
     _: &Arc<TraitEnvironment>,
diff --git a/crates/hir-ty/src/lower.rs b/crates/hir-ty/src/lower.rs
index 30ebd1f92e071..2a6d69e7fc624 100644
--- a/crates/hir-ty/src/lower.rs
+++ b/crates/hir-ty/src/lower.rs
@@ -10,7 +10,7 @@ use std::{
     iter,
 };
 
-use base_db::CrateId;
+use base_db::{salsa::Cycle, CrateId};
 use chalk_ir::{
     cast::Cast, fold::Shift, fold::TypeFoldable, interner::HasInterner, Mutability, Safety,
 };
@@ -1454,7 +1454,7 @@ pub(crate) fn generic_predicates_for_param_query(
 
 pub(crate) fn generic_predicates_for_param_recover(
     _db: &dyn HirDatabase,
-    _cycle: &[String],
+    _cycle: &Cycle,
     _def: &GenericDefId,
     _param_id: &TypeOrConstParamId,
     _assoc_name: &Option<Name>,
@@ -1637,7 +1637,7 @@ pub(crate) fn generic_defaults_query(
 
 pub(crate) fn generic_defaults_recover(
     db: &dyn HirDatabase,
-    _cycle: &[String],
+    _cycle: &Cycle,
     def: &GenericDefId,
 ) -> Arc<[Binders<crate::GenericArg>]> {
     let generic_params = generics(db.upcast(), *def);
@@ -1865,7 +1865,7 @@ pub(crate) fn ty_query(db: &dyn HirDatabase, def: TyDefId) -> Binders<Ty> {
     }
 }
 
-pub(crate) fn ty_recover(db: &dyn HirDatabase, _cycle: &[String], def: &TyDefId) -> Binders<Ty> {
+pub(crate) fn ty_recover(db: &dyn HirDatabase, _cycle: &Cycle, def: &TyDefId) -> Binders<Ty> {
     let generics = match *def {
         TyDefId::BuiltinType(_) => return Binders::empty(Interner, TyKind::Error.intern(Interner)),
         TyDefId::AdtId(it) => generics(db.upcast(), it.into()),
@@ -1915,7 +1915,7 @@ pub(crate) fn const_param_ty_query(db: &dyn HirDatabase, def: ConstParamId) -> T
 
 pub(crate) fn impl_self_ty_recover(
     db: &dyn HirDatabase,
-    _cycle: &[String],
+    _cycle: &Cycle,
     impl_id: &ImplId,
 ) -> Binders<Ty> {
     let generics = generics(db.upcast(), (*impl_id).into());
diff --git a/crates/hir-ty/src/mir/lower.rs b/crates/hir-ty/src/mir/lower.rs
index 922aee011cf33..639fabc198c15 100644
--- a/crates/hir-ty/src/mir/lower.rs
+++ b/crates/hir-ty/src/mir/lower.rs
@@ -2,7 +2,7 @@
 
 use std::{fmt::Write, iter, mem};
 
-use base_db::FileId;
+use base_db::{salsa::Cycle, FileId};
 use chalk_ir::{BoundVar, ConstData, DebruijnIndex, TyKind};
 use hir_def::{
     body::Body,
@@ -2110,7 +2110,7 @@ pub fn mir_body_query(db: &dyn HirDatabase, def: DefWithBodyId) -> Result<Arc<M
 
 pub fn mir_body_recover(
     _: &dyn HirDatabase,
-    _: &[String],
+    _: &Cycle,
     _: &DefWithBodyId,
 ) -> Result<Arc<MirBody>> {
     Err(MirLowerError::Loop)
diff --git a/crates/hir-ty/src/mir/monomorphization.rs b/crates/hir-ty/src/mir/monomorphization.rs
index 7d2bb95d931c2..8da03eef2e0c6 100644
--- a/crates/hir-ty/src/mir/monomorphization.rs
+++ b/crates/hir-ty/src/mir/monomorphization.rs
@@ -9,6 +9,7 @@
 
 use std::mem;
 
+use base_db::salsa::Cycle;
 use chalk_ir::{
     fold::{FallibleTypeFolder, TypeFoldable, TypeSuperFoldable},
     ConstData, DebruijnIndex,
@@ -300,7 +301,7 @@ pub fn monomorphized_mir_body_query(
 
 pub fn monomorphized_mir_body_recover(
     _: &dyn HirDatabase,
-    _: &[String],
+    _: &Cycle,
     _: &DefWithBodyId,
     _: &Substitution,
     _: &Arc<TraitEnvironment>,

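The recurring signature change in this patch swaps the second parameter of the cycle-recovery hooks from a bare `&[String]` of participant names to salsa's structured `Cycle` type. The hooks' job is unchanged: when the query engine detects a dependency cycle, it calls the hook to produce a fallback value instead of recursing forever. A hedged sketch of that shape, using a stand-in `Cycle` struct rather than the real `base_db::salsa::Cycle` API:

    // Stand-in for salsa's cycle descriptor.
    #[derive(Debug)]
    struct Cycle {
        participants: Vec<String>,
    }

    // Shape of a recovery hook: ignore the cycle details, return a fallback.
    fn const_eval_recover(_cycle: &Cycle) -> Result<i128, String> {
        Err("cycle detected during const eval".to_owned())
    }

    fn main() {
        let cycle = Cycle { participants: vec!["const_eval".into(), "layout_of_ty".into()] };
        println!("cycle through: {:?}", cycle.participants);
        assert!(const_eval_recover(&cycle).is_err());
    }
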
From 5544f4fa3de9abf0e5b1ea486795fbec6f466c76 Mon Sep 17 00:00:00 2001
From: Lukas Wirth 
Date: Thu, 7 Dec 2023 11:48:58 +0100
Subject: [PATCH 57/80] Bump and unlock some dependencies

---
 Cargo.lock                      | 67 ++++++++++++---------------------
 Cargo.toml                      |  9 +++--
 crates/hir-def/Cargo.toml       |  3 +-
 crates/intern/Cargo.toml        |  2 +-
 crates/rust-analyzer/Cargo.toml |  5 ---
 crates/stdx/Cargo.toml          |  2 +-
 6 files changed, 33 insertions(+), 55 deletions(-)

diff --git a/Cargo.lock b/Cargo.lock
index 0d624c7907455..ca058dba173af 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -276,7 +276,7 @@ dependencies = [
  "autocfg",
  "cfg-if",
  "crossbeam-utils",
- "memoffset 0.9.0",
+ "memoffset",
  "scopeguard",
 ]
 
@@ -301,12 +301,12 @@ dependencies = [
 
 [[package]]
 name = "dashmap"
-version = "5.4.0"
+version = "5.5.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "907076dfda823b0b36d2a1bb5f90c96660a5bbcd7729e10727f07858f22c4edc"
+checksum = "978747c1d849a7d2ee5e8adc0159961c48fb7e5db2f06af6723b80123bb53856"
 dependencies = [
  "cfg-if",
- "hashbrown 0.12.3",
+ "hashbrown",
  "lock_api",
  "once_cell",
  "parking_lot_core",
@@ -448,15 +448,9 @@ checksum = "b6c80984affa11d98d1b88b66ac8853f143217b399d3c74116778ff8fdb4ed2e"
 
 [[package]]
 name = "hashbrown"
-version = "0.12.3"
+version = "0.14.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888"
-
-[[package]]
-name = "hashbrown"
-version = "0.14.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f93e7192158dbcda357bdec5fb5788eebf8bbac027f3f33e719d29135ae84156"
+checksum = "290f1a1d9242c78d09ce40a5e87e7554ee637af1351968159f4952f028f75604"
 
 [[package]]
 name = "heck"
@@ -509,7 +503,7 @@ dependencies = [
  "either",
  "expect-test",
  "fst",
- "hashbrown 0.12.3",
+ "hashbrown",
  "hir-expand",
  "indexmap",
  "intern",
@@ -539,7 +533,7 @@ dependencies = [
  "cov-mark",
  "either",
  "expect-test",
- "hashbrown 0.12.3",
+ "hashbrown",
  "intern",
  "itertools",
  "la-arena 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -765,7 +759,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "d530e1a18b1cb4c484e6e34556a0d948706958449fca0cab753d649f2bce3d1f"
 dependencies = [
  "equivalent",
- "hashbrown 0.14.2",
+ "hashbrown",
 ]
 
 [[package]]
@@ -793,7 +787,7 @@ name = "intern"
 version = "0.0.0"
 dependencies = [
  "dashmap",
- "hashbrown 0.12.3",
+ "hashbrown",
  "rustc-hash",
  "triomphe",
 ]
@@ -1003,15 +997,6 @@ dependencies = [
  "libc",
 ]
 
-[[package]]
-name = "memoffset"
-version = "0.8.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d61c719bcfbcf5d62b3a09efa6088de8c54bc0bfcd3ea7ae39fcc186108b8de1"
-dependencies = [
- "autocfg",
-]
-
 [[package]]
 name = "memoffset"
 version = "0.9.0"
@@ -1062,11 +1047,11 @@ dependencies = [
 
 [[package]]
 name = "miow"
-version = "0.5.0"
+version = "0.6.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "52ffbca2f655e33c08be35d87278e5b18b89550a37dbd598c20db92f6a471123"
+checksum = "359f76430b20a79f9e20e115b3428614e654f04fab314482fc0fda0ebd3c6044"
 dependencies = [
- "windows-sys 0.42.0",
+ "windows-sys 0.48.0",
 ]
 
 [[package]]
@@ -1178,15 +1163,15 @@ dependencies = [
 
 [[package]]
 name = "parking_lot_core"
-version = "0.9.6"
+version = "0.9.9"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ba1ef8814b5c993410bb3adfad7a5ed269563e4a2f90c41f5d85be7fb47133bf"
+checksum = "4c42a9226546d68acdd9c0a280d17ce19bfe27a46bf68784e4066115788d008e"
 dependencies = [
  "cfg-if",
  "libc",
- "redox_syscall 0.2.16",
+ "redox_syscall 0.4.1",
  "smallvec",
- "windows-sys 0.42.0",
+ "windows-targets",
 ]
 
 [[package]]
@@ -1481,31 +1466,31 @@ dependencies = [
 
 [[package]]
 name = "redox_syscall"
-version = "0.2.16"
+version = "0.3.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "fb5a58c1855b4b6819d59012155603f0b22ad30cad752600aadfcb695265519a"
+checksum = "567664f262709473930a4bf9e51bf2ebf3348f2e748ccc50dea20646858f8f29"
 dependencies = [
  "bitflags 1.3.2",
 ]
 
 [[package]]
 name = "redox_syscall"
-version = "0.3.5"
+version = "0.4.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "567664f262709473930a4bf9e51bf2ebf3348f2e748ccc50dea20646858f8f29"
+checksum = "4722d768eff46b75989dd134e5c353f0d6296e5aaa3132e776cbdb56be7731aa"
 dependencies = [
  "bitflags 1.3.2",
 ]
 
 [[package]]
 name = "rowan"
-version = "0.15.11"
+version = "0.15.14"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "64449cfef9483a475ed56ae30e2da5ee96448789fb2aa240a04beb6a055078bf"
+checksum = "a9672ea408d491b517a4dc370159ec6dd7cb5c5fd2f41b02883830339109ac76"
 dependencies = [
  "countme",
- "hashbrown 0.12.3",
- "memoffset 0.8.0",
+ "hashbrown",
+ "memoffset",
  "rustc-hash",
  "text-size",
 ]
@@ -1533,12 +1518,10 @@ dependencies = [
  "lsp-types",
  "mbe",
  "mimalloc",
- "mio",
  "nohash-hasher",
  "num_cpus",
  "oorandom",
  "parking_lot",
- "parking_lot_core",
  "parser",
  "proc-macro-api",
  "profile",
diff --git a/Cargo.toml b/Cargo.toml
index 17810d0f299d1..f3f01aab8eee6 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -95,6 +95,9 @@ bitflags = "2.4.1"
 cargo_metadata = "0.18.1"
 dissimilar = "1.0.7"
 either = "1.9.0"
+hashbrown = { version = "0.14", features = [
+  "inline-more",
+], default-features = false }
 indexmap = "2.1.0"
 itertools = "0.12.0"
 libc = "0.2.150"
@@ -121,7 +124,5 @@ tracing-subscriber = { version = "0.3.18", default-features = false, features =
 triomphe = { version = "0.1.10", default-features = false, features = ["std"] }
 xshell = "0.2.5"
 
-# can't upgrade due to dashmap depending on 0.12.3 currently
-hashbrown = { version = "0.12.3", features = [
-  "inline-more",
-], default-features = false }
+# We need to freeze the version of the crate, as the raw-api feature is considered unstable
+dashmap = { version = "=5.5.3", features = ["raw-api"] }
diff --git a/crates/hir-def/Cargo.toml b/crates/hir-def/Cargo.toml
index e4f2e14c51c72..2d174517605f1 100644
--- a/crates/hir-def/Cargo.toml
+++ b/crates/hir-def/Cargo.toml
@@ -15,8 +15,7 @@ doctest = false
 arrayvec = "0.7.2"
 bitflags.workspace = true
 cov-mark = "2.0.0-pre.1"
-# We need to freeze the version of the crate, as the raw-api feature is considered unstable
-dashmap = { version = "=5.4.0", features = ["raw-api"] }
+dashmap.workspace = true
 drop_bomb = "0.1.5"
 either.workspace = true
 fst = { version = "0.4.7", default-features = false }
diff --git a/crates/intern/Cargo.toml b/crates/intern/Cargo.toml
index 89b302c796b52..d9184b0fb6fe5 100644
--- a/crates/intern/Cargo.toml
+++ b/crates/intern/Cargo.toml
@@ -14,7 +14,7 @@ doctest = false
 
 [dependencies]
 # We need to freeze the version of the crate, as the raw-api feature is considered unstable
-dashmap = { version = "=5.4.0", features = ["raw-api"] }
+dashmap.workspace = true
 hashbrown.workspace = true
 rustc-hash = "1.1.0"
 triomphe.workspace = true
diff --git a/crates/rust-analyzer/Cargo.toml b/crates/rust-analyzer/Cargo.toml
index 76f764460329b..408c1fb6f39b7 100644
--- a/crates/rust-analyzer/Cargo.toml
+++ b/crates/rust-analyzer/Cargo.toml
@@ -43,11 +43,6 @@ triomphe.workspace = true
 nohash-hasher.workspace = true
 always-assert = "0.1.2"
 
-# These 3 deps are not used by r-a directly, but we list them here to lock in their versions
-# in our transitive deps to prevent them from pulling in windows-sys 0.45.0
-mio = "=0.8.5"
-parking_lot_core = "=0.9.6"
-
 cfg.workspace = true
 flycheck.workspace = true
 hir-def.workspace = true
diff --git a/crates/stdx/Cargo.toml b/crates/stdx/Cargo.toml
index ea6c11ac0d56a..c914ae2144b59 100644
--- a/crates/stdx/Cargo.toml
+++ b/crates/stdx/Cargo.toml
@@ -21,7 +21,7 @@ itertools.workspace = true
 # Think twice before adding anything here
 
 [target.'cfg(windows)'.dependencies]
-miow = "0.5.0"
+miow = "0.6.0"
 winapi = { version = "0.3.9", features = ["winerror"] }
 
 [features]

From fbe494a10613f8fff8c03fa81b6751740a327068 Mon Sep 17 00:00:00 2001
From: Young-Flash <871946895@qq.com>
Date: Thu, 7 Dec 2023 20:45:42 +0800
Subject: [PATCH 58/80] fix: change default diagnostic range into impl body

---
 crates/hir/src/diagnostics.rs                 |  1 +
 crates/hir/src/lib.rs                         |  1 +
 .../trait_impl_redundant_assoc_item.rs        | 19 +++++++++++++------
 3 files changed, 15 insertions(+), 6 deletions(-)

diff --git a/crates/hir/src/diagnostics.rs b/crates/hir/src/diagnostics.rs
index 52c1c27a7fa93..1cb36f9b021fe 100644
--- a/crates/hir/src/diagnostics.rs
+++ b/crates/hir/src/diagnostics.rs
@@ -316,5 +316,6 @@ pub struct TraitImplMissingAssocItems {
 pub struct TraitImplRedundantAssocItems {
     pub file_id: HirFileId,
     pub trait_: Trait,
+    pub impl_: AstPtr<ast::Impl>,
     pub assoc_item: (Name, AssocItem),
 }
diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs
index 4db1a02c0cd2a..5137bff055a4b 100644
--- a/crates/hir/src/lib.rs
+++ b/crates/hir/src/lib.rs
@@ -706,6 +706,7 @@ impl Module {
                         TraitImplRedundantAssocItems {
                             trait_,
                             file_id,
+                            impl_: ast_id_map.get(node.ast_id()),
                             assoc_item: (name, assoc_item),
                         }
                         .into(),
diff --git a/crates/ide-diagnostics/src/handlers/trait_impl_redundant_assoc_item.rs b/crates/ide-diagnostics/src/handlers/trait_impl_redundant_assoc_item.rs
index 6aded11382cbd..820014391467e 100644
--- a/crates/ide-diagnostics/src/handlers/trait_impl_redundant_assoc_item.rs
+++ b/crates/ide-diagnostics/src/handlers/trait_impl_redundant_assoc_item.rs
@@ -1,4 +1,5 @@
-use hir::{db::ExpandDatabase, Const, Function, HasSource, TypeAlias};
+use hir::{Const, Function, HasSource, TypeAlias};
+use ide_db::base_db::FileRange;
 
 use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext};
 
@@ -13,31 +14,37 @@ pub(crate) fn trait_impl_redundant_assoc_item(
     let assoc_item = d.assoc_item.1;
     let db = ctx.sema.db;
 
-    let range = db.parse_or_expand(d.file_id).text_range();
+    let default_range = d.impl_.syntax_node_ptr().text_range();
     let trait_name = d.trait_.name(db).to_smol_str();
 
     let (redundant_item_name, diagnostic_range) = match assoc_item {
         hir::AssocItem::Function(id) => (
             format!("`fn {}`", name.display(db)),
-            Function::from(id).source(db).map(|it| it.syntax().value.text_range()).unwrap_or(range),
+            Function::from(id)
+                .source(db)
+                .map(|it| it.syntax().value.text_range())
+                .unwrap_or(default_range),
         ),
         hir::AssocItem::Const(id) => (
             format!("`const {}`", name.display(db)),
-            Const::from(id).source(db).map(|it| it.syntax().value.text_range()).unwrap_or(range),
+            Const::from(id)
+                .source(db)
+                .map(|it| it.syntax().value.text_range())
+                .unwrap_or(default_range),
         ),
         hir::AssocItem::TypeAlias(id) => (
             format!("`type {}`", name.display(db)),
             TypeAlias::from(id)
                 .source(db)
                 .map(|it| it.syntax().value.text_range())
-                .unwrap_or(range),
+                .unwrap_or(default_range),
         ),
     };
 
     Diagnostic::new(
         DiagnosticCode::RustcHardError("E0407"),
         format!("{redundant_item_name} is not a member of trait `{trait_name}`"),
-        diagnostic_range,
+        FileRange { file_id: d.file_id.file_id().unwrap(), range: diagnostic_range },
     )
 }
 

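The substantive change above is the fallback range: a redundant associated item whose source could not be resolved used to be flagged across `db.parse_or_expand(d.file_id).text_range()`, i.e. the whole file; it now falls back to the enclosing `impl` block. A minimal illustration of the fallback logic, with hypothetical range types:

    #[derive(Debug, Clone, Copy, PartialEq)]
    struct TextRange { start: u32, end: u32 }

    // Mirrors `item.source(db).map(|it| ...).unwrap_or(default_range)`.
    fn diagnostic_range(item_range: Option<TextRange>, impl_range: TextRange) -> TextRange {
        item_range.unwrap_or(impl_range)
    }

    fn main() {
        let impl_range = TextRange { start: 10, end: 80 };
        // No resolvable source: point at the impl body, not the whole file.
        assert_eq!(diagnostic_range(None, impl_range), impl_range);
        let item = TextRange { start: 20, end: 30 };
        assert_eq!(diagnostic_range(Some(item), impl_range), item);
    }
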
From c11a002bca3c7b465ce21dcdbfffe18fd2a8c58c Mon Sep 17 00:00:00 2001
From: hkalbasi 
Date: Fri, 8 Dec 2023 00:44:45 +0330
Subject: [PATCH 59/80] Fix panic with closure inside array len

---
 crates/hir-ty/src/consteval.rs        | 18 +++++++++++++++---
 crates/hir-ty/src/tests/regression.rs | 12 ++++++++++++
 2 files changed, 27 insertions(+), 3 deletions(-)

diff --git a/crates/hir-ty/src/consteval.rs b/crates/hir-ty/src/consteval.rs
index 0348680e5da19..b4bbae6175a11 100644
--- a/crates/hir-ty/src/consteval.rs
+++ b/crates/hir-ty/src/consteval.rs
@@ -3,7 +3,8 @@
 use base_db::CrateId;
 use chalk_ir::{cast::Cast, BoundVar, DebruijnIndex};
 use hir_def::{
-    hir::Expr,
+    body::Body,
+    hir::{Expr, ExprId},
     path::Path,
     resolver::{Resolver, ValueNs},
     type_ref::LiteralConstRef,
@@ -280,7 +281,7 @@ pub(crate) fn const_eval_discriminant_variant(
 // get an `InferenceResult` instead of an `InferenceContext`. And we should remove `ctx.clone().resolve_all()` here
 // and make this function private. See the fixme comment on `InferenceContext::resolve_all`.
 pub(crate) fn eval_to_const(
-    expr: Idx<Expr>,
+    expr: ExprId,
     mode: ParamLoweringMode,
     ctx: &mut InferenceContext<'_>,
     args: impl FnOnce() -> Generics,
@@ -288,13 +289,24 @@ pub(crate) fn eval_to_const(
 ) -> Const {
     let db = ctx.db;
     let infer = ctx.clone().resolve_all();
+    fn has_closure(body: &Body, expr: ExprId) -> bool {
+        if matches!(body[expr], Expr::Closure { .. }) {
+            return true;
+        }
+        let mut r = false;
+        body[expr].walk_child_exprs(|idx| r |= has_closure(body, idx));
+        r
+    }
+    if has_closure(&ctx.body, expr) {
+        // Type checking closures needs an isolated body (see the above FIXME). Bail out early to prevent a panic.
+        return unknown_const(infer[expr].clone());
+    }
     if let Expr::Path(p) = &ctx.body.exprs[expr] {
         let resolver = &ctx.resolver;
         if let Some(c) = path_to_const(db, resolver, p, mode, args, debruijn, infer[expr].clone()) {
             return c;
         }
     }
-    let infer = ctx.clone().resolve_all();
     if let Ok(mir_body) = lower_to_mir(ctx.db, ctx.owner, &ctx.body, &infer, expr) {
         if let Ok(result) = interpret_mir(db, Arc::new(mir_body), true, None).0 {
             return result;
diff --git a/crates/hir-ty/src/tests/regression.rs b/crates/hir-ty/src/tests/regression.rs
index 6ea059065e935..35079e70946d5 100644
--- a/crates/hir-ty/src/tests/regression.rs
+++ b/crates/hir-ty/src/tests/regression.rs
@@ -2000,3 +2000,15 @@ fn test() {
 "#,
     );
 }
+
+#[test]
+fn rustc_test_issue_52437() {
+    check_types(
+        r#"
+    fn main() {
+        let x = [(); &(&'static: loop { |x| {}; }) as *const _ as usize]
+          //^ [(); _]
+    }
+    "#,
+    );
+}

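The guard added above recursively walks the array-length expression and bails out before MIR lowering if any subexpression is a closure, because type-checking a closure needs its own isolated body. A self-contained sketch of that recursive walk over a toy expression tree (not the crate's `Body`/`Expr` types):

    enum Expr {
        Literal(i64),
        Closure,
        Call(Vec<Expr>),
    }

    fn has_closure(expr: &Expr) -> bool {
        match expr {
            Expr::Closure => true,
            // Recurse into children, mirroring `walk_child_exprs`.
            Expr::Call(args) => args.iter().any(has_closure),
            Expr::Literal(_) => false,
        }
    }

    fn main() {
        let e = Expr::Call(vec![Expr::Literal(1), Expr::Call(vec![Expr::Closure])]);
        assert!(has_closure(&e));
        assert!(!has_closure(&Expr::Literal(0)));
    }
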
From 143203b7133a810c068e44c657fc6c5ee978023b Mon Sep 17 00:00:00 2001
From: Lukas Wirth 
Date: Fri, 8 Dec 2023 10:47:36 +0100
Subject: [PATCH 60/80] Make TraitEnvironment's constructor private

---
 crates/hir-ty/src/consteval.rs  |  2 +-
 crates/hir-ty/src/display.rs    |  5 ++---
 crates/hir-ty/src/infer/expr.rs |  6 ++---
 crates/hir-ty/src/lib.rs        |  2 +-
 crates/hir-ty/src/lower.rs      |  9 ++------
 crates/hir-ty/src/mir.rs        |  3 +--
 crates/hir-ty/src/traits.rs     | 22 +++++++++++++++----
 crates/hir/src/lib.rs           | 39 +++++++++++++++------------------
 8 files changed, 46 insertions(+), 42 deletions(-)

diff --git a/crates/hir-ty/src/consteval.rs b/crates/hir-ty/src/consteval.rs
index ddeb9f14b5dc7..9792d945eb8f5 100644
--- a/crates/hir-ty/src/consteval.rs
+++ b/crates/hir-ty/src/consteval.rs
@@ -137,7 +137,7 @@ pub fn intern_const_ref(
     ty: Ty,
     krate: CrateId,
 ) -> Const {
-    let layout = db.layout_of_ty(ty.clone(), Arc::new(TraitEnvironment::empty(krate)));
+    let layout = db.layout_of_ty(ty.clone(), TraitEnvironment::empty(krate));
     let bytes = match value {
         LiteralConstRef::Int(i) => {
             // FIXME: We should handle failure of layout better.
diff --git a/crates/hir-ty/src/display.rs b/crates/hir-ty/src/display.rs
index a324129b351cc..d81926f7c9762 100644
--- a/crates/hir-ty/src/display.rs
+++ b/crates/hir-ty/src/display.rs
@@ -448,9 +448,8 @@ fn render_const_scalar(
 ) -> Result<(), HirDisplayError> {
     // FIXME: We need to get krate from the final callers of the hir display
     // infrastructure and have it here as a field on `f`.
-    let trait_env = Arc::new(TraitEnvironment::empty(
-        *f.db.crate_graph().crates_in_topological_order().last().unwrap(),
-    ));
+    let trait_env =
+        TraitEnvironment::empty(*f.db.crate_graph().crates_in_topological_order().last().unwrap());
     match ty.kind(Interner) {
         TyKind::Scalar(s) => match s {
             Scalar::Bool => write!(f, "{}", if b[0] == 0 { false } else { true }),
diff --git a/crates/hir-ty/src/infer/expr.rs b/crates/hir-ty/src/infer/expr.rs
index 0c3c725a7c743..24026202b74d6 100644
--- a/crates/hir-ty/src/infer/expr.rs
+++ b/crates/hir-ty/src/infer/expr.rs
@@ -18,7 +18,6 @@ use hir_def::{
 use hir_expand::name::{name, Name};
 use stdx::always;
 use syntax::ast::RangeOp;
-use triomphe::Arc;
 
 use crate::{
     autoderef::{builtin_deref, deref_by_trait, Autoderef},
@@ -40,7 +39,8 @@ use crate::{
     traits::FnTrait,
     utils::{generics, Generics},
     Adjust, Adjustment, AdtId, AutoBorrow, Binders, CallableDefId, FnPointer, FnSig, FnSubst,
-    Interner, Rawness, Scalar, Substitution, TraitRef, Ty, TyBuilder, TyExt, TyKind,
+    Interner, Rawness, Scalar, Substitution, TraitEnvironment, TraitRef, Ty, TyBuilder, TyExt,
+    TyKind,
 };
 
 use super::{
@@ -1291,7 +1291,7 @@ impl InferenceContext<'_> {
         let g = self.resolver.update_to_inner_scope(self.db.upcast(), self.owner, expr);
         let prev_env = block_id.map(|block_id| {
             let prev_env = self.table.trait_env.clone();
-            Arc::make_mut(&mut self.table.trait_env).block = Some(block_id);
+            TraitEnvironment::with_block(&mut self.table.trait_env, block_id);
             prev_env
         });
 
diff --git a/crates/hir-ty/src/lib.rs b/crates/hir-ty/src/lib.rs
index 5a3e423f152ae..cf174feed24b8 100644
--- a/crates/hir-ty/src/lib.rs
+++ b/crates/hir-ty/src/lib.rs
 pub type TyKind = chalk_ir::TyKind<Interner>;
 pub type TypeFlags = chalk_ir::TypeFlags;
 pub type DynTy = chalk_ir::DynTy<Interner>;
 pub type FnPointer = chalk_ir::FnPointer<Interner>;
-// pub type FnSubst = chalk_ir::FnSubst<Interner>;
+// pub type FnSubst = chalk_ir::FnSubst<Interner>; // a re-export so we don't lose the tuple constructor
 pub use chalk_ir::FnSubst;
 pub type ProjectionTy = chalk_ir::ProjectionTy<Interner>;
 pub type AliasTy = chalk_ir::AliasTy<Interner>;
diff --git a/crates/hir-ty/src/lower.rs b/crates/hir-ty/src/lower.rs
index 2a6d69e7fc624..c86fe9adff866 100644
--- a/crates/hir-ty/src/lower.rs
+++ b/crates/hir-ty/src/lower.rs
@@ -1468,7 +1468,7 @@ pub(crate) fn trait_environment_for_body_query(
 ) -> Arc<TraitEnvironment> {
     let Some(def) = def.as_generic_def_id() else {
         let krate = def.module(db.upcast()).krate();
-        return Arc::new(TraitEnvironment::empty(krate));
+        return TraitEnvironment::empty(krate);
     };
     db.trait_environment(def)
 }
@@ -1528,12 +1528,7 @@ pub(crate) fn trait_environment_query(
 
     let env = chalk_ir::Environment::new(Interner).add_clauses(Interner, clauses);
 
-    Arc::new(TraitEnvironment {
-        krate,
-        block: None,
-        traits_from_clauses: traits_in_scope.into_boxed_slice(),
-        env,
-    })
+    TraitEnvironment::new(krate, None, traits_in_scope.into_boxed_slice(), env)
 }
 
 /// Resolve the where clause(s) of an item with generics.
diff --git a/crates/hir-ty/src/mir.rs b/crates/hir-ty/src/mir.rs
index 2e6fe59d3bd80..f1795e71d945c 100644
--- a/crates/hir-ty/src/mir.rs
+++ b/crates/hir-ty/src/mir.rs
@@ -40,7 +40,6 @@ pub use monomorphization::{
 use rustc_hash::FxHashMap;
 use smallvec::{smallvec, SmallVec};
 use stdx::{impl_from, never};
-use triomphe::Arc;
 
 use super::consteval::{intern_const_scalar, try_const_usize};
 
@@ -147,7 +146,7 @@ impl<V, T> ProjectionElem<V, T> {
             base = normalize(
                 db,
                 // FIXME: we should get this from caller
-                Arc::new(TraitEnvironment::empty(krate)),
+                TraitEnvironment::empty(krate),
                 base,
             );
         }
diff --git a/crates/hir-ty/src/traits.rs b/crates/hir-ty/src/traits.rs
index 467b94a2662a1..b6bc76bc98d53 100644
--- a/crates/hir-ty/src/traits.rs
+++ b/crates/hir-ty/src/traits.rs
@@ -48,18 +48,32 @@ pub struct TraitEnvironment {
     pub krate: CrateId,
     pub block: Option<BlockId>,
     // FIXME make this a BTreeMap
-    pub(crate) traits_from_clauses: Box<[(Ty, TraitId)]>,
+    traits_from_clauses: Box<[(Ty, TraitId)]>,
     pub env: chalk_ir::Environment<Interner>,
 }
 
 impl TraitEnvironment {
-    pub fn empty(krate: CrateId) -> Self {
-        TraitEnvironment {
+    pub fn empty(krate: CrateId) -> Arc<Self> {
+        Arc::new(TraitEnvironment {
             krate,
             block: None,
             traits_from_clauses: Box::default(),
             env: chalk_ir::Environment::new(Interner),
-        }
+        })
+    }
+
+    pub fn new(
+        krate: CrateId,
+        block: Option,
+        traits_from_clauses: Box<[(Ty, TraitId)]>,
+        env: chalk_ir::Environment<Interner>,
+    ) -> Arc<Self> {
+        Arc::new(TraitEnvironment { krate, block, traits_from_clauses, env })
+    }
+
+    pub fn with_block(this: &mut Arc<Self>, block: BlockId) {
+        Arc::make_mut(this).block = Some(block);
     }
 
     pub fn traits_in_scope_from_clauses(&self, ty: Ty) -> impl Iterator<Item = TraitId> + '_ {
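
The constructors above return Arc<Self> so the allocation happens once,
inside TraitEnvironment, and `with_block` only clones when the environment
is actually shared. A minimal sketch of that copy-on-write behaviour using
std's Arc (rust-analyzer uses triomphe::Arc, which exposes the same
make_mut API):

    use std::sync::Arc;

    fn main() {
        let mut env = Arc::new(vec![1, 2, 3]);
        let snapshot = Arc::clone(&env);
        // Clones the underlying value, because `snapshot` still points at it...
        Arc::make_mut(&mut env).push(4);
        assert_eq!(*env, [1, 2, 3, 4]);
        assert_eq!(*snapshot, [1, 2, 3]);
        // ...but mutates in place once the Arc is uniquely owned.
        drop(snapshot);
        Arc::make_mut(&mut env).push(5);
        assert_eq!(*env, [1, 2, 3, 4, 5]);
    }
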
diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs
index 5137bff055a4b..ca838c7a51e10 100644
--- a/crates/hir/src/lib.rs
+++ b/crates/hir/src/lib.rs
@@ -3564,10 +3564,9 @@ impl TraitRef {
         resolver: &Resolver,
         trait_ref: hir_ty::TraitRef,
     ) -> TraitRef {
-        let env = resolver.generic_def().map_or_else(
-            || Arc::new(TraitEnvironment::empty(resolver.krate())),
-            |d| db.trait_environment(d),
-        );
+        let env = resolver
+            .generic_def()
+            .map_or_else(|| TraitEnvironment::empty(resolver.krate()), |d| db.trait_environment(d));
         TraitRef { env, trait_ref }
     }
 
@@ -3707,15 +3706,14 @@ impl Type {
         resolver: &Resolver,
         ty: Ty,
     ) -> Type {
-        let environment = resolver.generic_def().map_or_else(
-            || Arc::new(TraitEnvironment::empty(resolver.krate())),
-            |d| db.trait_environment(d),
-        );
+        let environment = resolver
+            .generic_def()
+            .map_or_else(|| TraitEnvironment::empty(resolver.krate()), |d| db.trait_environment(d));
         Type { env: environment, ty }
     }
 
     pub(crate) fn new_for_crate(krate: CrateId, ty: Ty) -> Type {
-        Type { env: Arc::new(TraitEnvironment::empty(krate)), ty }
+        Type { env: TraitEnvironment::empty(krate), ty }
     }
 
     pub fn reference(inner: &Type, m: Mutability) -> Type {
@@ -3731,10 +3729,9 @@ impl Type {
 
     fn new(db: &dyn HirDatabase, lexical_env: impl HasResolver, ty: Ty) -> Type {
         let resolver = lexical_env.resolver(db.upcast());
-        let environment = resolver.generic_def().map_or_else(
-            || Arc::new(TraitEnvironment::empty(resolver.krate())),
-            |d| db.trait_environment(d),
-        );
+        let environment = resolver
+            .generic_def()
+            .map_or_else(|| TraitEnvironment::empty(resolver.krate()), |d| db.trait_environment(d));
         Type { env: environment, ty }
     }
 
@@ -4304,10 +4301,10 @@ impl Type {
         let canonical = hir_ty::replace_errors_with_variables(&self.ty);
 
         let krate = scope.krate();
-        let environment = scope.resolver().generic_def().map_or_else(
-            || Arc::new(TraitEnvironment::empty(krate.id)),
-            |d| db.trait_environment(d),
-        );
+        let environment = scope
+            .resolver()
+            .generic_def()
+            .map_or_else(|| TraitEnvironment::empty(krate.id), |d| db.trait_environment(d));
 
         method_resolution::iterate_method_candidates_dyn(
             &canonical,
@@ -4361,10 +4358,10 @@ impl Type {
         let canonical = hir_ty::replace_errors_with_variables(&self.ty);
 
         let krate = scope.krate();
-        let environment = scope.resolver().generic_def().map_or_else(
-            || Arc::new(TraitEnvironment::empty(krate.id)),
-            |d| db.trait_environment(d),
-        );
+        let environment = scope
+            .resolver()
+            .generic_def()
+            .map_or_else(|| TraitEnvironment::empty(krate.id), |d| db.trait_environment(d));
 
         method_resolution::iterate_path_candidates(
             &canonical,

From 71337f6682c27233d9c7bf9c5e3383da451ba604 Mon Sep 17 00:00:00 2001
From: Lukas Wirth 
Date: Fri, 8 Dec 2023 11:26:22 +0100
Subject: [PATCH 61/80] fix: Fix `concat_bytes!` expansion

---
 .../macro_expansion_tests/builtin_fn_macro.rs |  4 +--
 .../macro_expansion_tests/mbe/regression.rs   | 26 +++++++++++++++++++
 crates/hir-expand/src/builtin_fn_macro.rs     | 22 ++++++++++++++--
 crates/mbe/src/token_map.rs                   | 16 +++++++++---
 4 files changed, 61 insertions(+), 7 deletions(-)

diff --git a/crates/hir-def/src/macro_expansion_tests/builtin_fn_macro.rs b/crates/hir-def/src/macro_expansion_tests/builtin_fn_macro.rs
index 106ead83fad76..514219ee71505 100644
--- a/crates/hir-def/src/macro_expansion_tests/builtin_fn_macro.rs
+++ b/crates/hir-def/src/macro_expansion_tests/builtin_fn_macro.rs
@@ -468,12 +468,12 @@ macro_rules! concat_bytes {}
 
 fn main() { concat_bytes!(b'A', b"BC", [68, b'E', 70]); }
 "##,
-        expect![[r##"
+        expect![[r#"
 #[rustc_builtin_macro]
 macro_rules! concat_bytes {}
 
 fn main() { [b'A', 66, 67, 68, b'E', 70]; }
-"##]],
+"#]],
     );
 }
 
diff --git a/crates/hir-def/src/macro_expansion_tests/mbe/regression.rs b/crates/hir-def/src/macro_expansion_tests/mbe/regression.rs
index 2886b2a366c04..9010050ee6788 100644
--- a/crates/hir-def/src/macro_expansion_tests/mbe/regression.rs
+++ b/crates/hir-def/src/macro_expansion_tests/mbe/regression.rs
@@ -1004,3 +1004,29 @@ fn main() {
 "##]],
     );
 }
+
+#[test]
+fn eager_concat_bytes_panic() {
+    check(
+        r#"
+#[rustc_builtin_macro]
+#[macro_export]
+macro_rules! concat_bytes {}
+
+fn main() {
+    let x = concat_bytes!(2);
+}
+
+"#,
+        expect![[r#"
+#[rustc_builtin_macro]
+#[macro_export]
+macro_rules! concat_bytes {}
+
+fn main() {
+    let x = /* error: unexpected token in input */[];
+}
+
+"#]],
+    );
+}
diff --git a/crates/hir-expand/src/builtin_fn_macro.rs b/crates/hir-expand/src/builtin_fn_macro.rs
index 903c21c84eec9..c8f04bfee54f1 100644
--- a/crates/hir-expand/src/builtin_fn_macro.rs
+++ b/crates/hir-expand/src/builtin_fn_macro.rs
@@ -6,6 +6,7 @@ use base_db::{
 };
 use cfg::CfgExpr;
 use either::Either;
+use itertools::Itertools;
 use mbe::{parse_exprs_with_sep, parse_to_token_tree};
 use syntax::{
     ast::{self, AstToken},
@@ -491,8 +492,25 @@ fn concat_bytes_expand(
             }
         }
     }
-    let ident = tt::Ident { text: bytes.join(", ").into(), span };
-    ExpandResult { value: quote!(span =>[#ident]), err }
+    let value = tt::Subtree {
+        delimiter: tt::Delimiter { open: span, close: span, kind: tt::DelimiterKind::Bracket },
+        token_trees: {
+            Itertools::intersperse_with(
+                bytes.into_iter().map(|it| {
+                    tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal { text: it.into(), span }))
+                }),
+                || {
+                    tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct {
+                        char: ',',
+                        spacing: tt::Spacing::Alone,
+                        span,
+                    }))
+                },
+            )
+            .collect()
+        },
+    };
+    ExpandResult { value, err }
 }
 
 fn concat_bytes_expand_subtree(
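
The rewrite above builds the bracketed expansion out of real literal and
punct tokens instead of pasting everything into one identifier. The comma
interleaving uses Itertools::intersperse_with in free-function form, which
avoids a clash with the unstable std iterator method of the same name; a
standalone sketch of that helper:

    use itertools::Itertools;

    fn main() {
        let bytes = ["b'A'", "66", "67"];
        // Insert a separator between adjacent items, as the expansion does
        // with `,` punct tokens between byte literals.
        let tokens: Vec<&str> =
            Itertools::intersperse_with(bytes.into_iter(), || ",").collect();
        assert_eq!(tokens, ["b'A'", ",", "66", ",", "67"]);
    }
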
diff --git a/crates/mbe/src/token_map.rs b/crates/mbe/src/token_map.rs
index 28b39b4f1eec4..7d15812f8cb60 100644
--- a/crates/mbe/src/token_map.rs
+++ b/crates/mbe/src/token_map.rs
@@ -2,7 +2,7 @@
 
 use std::hash::Hash;
 
-use stdx::itertools::Itertools;
+use stdx::{always, itertools::Itertools};
 use syntax::{TextRange, TextSize};
 use tt::Span;
 
@@ -21,13 +21,23 @@ impl<S: Span> SpanMap<S> {
     /// Finalizes the [`SpanMap`], shrinking its backing storage and validating that the offsets are
     /// in order.
     pub fn finish(&mut self) {
-        assert!(self.spans.iter().tuple_windows().all(|(a, b)| a.0 < b.0));
+        always!(
+            self.spans.iter().tuple_windows().all(|(a, b)| a.0 < b.0),
+            "spans are not in order"
+        );
         self.spans.shrink_to_fit();
     }
 
     /// Pushes a new span onto the [`SpanMap`].
     pub fn push(&mut self, offset: TextSize, span: S) {
-        debug_assert!(self.spans.last().map_or(true, |&(last_offset, _)| last_offset < offset));
+        if cfg!(debug_assertions) {
+            if let Some(&(last_offset, _)) = self.spans.last() {
+                assert!(
+                    last_offset < offset,
+                    "last_offset({last_offset:?}) must be smaller than offset({offset:?})"
+                );
+            }
+        }
         self.spans.push((offset, span));
     }
 

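The token_map.rs change swaps a hard assert for stdx's always! so that
out-of-order spans are reported without taking down the server in release
builds. A minimal sketch of that soft-assertion pattern (my own reduction,
not stdx's exact implementation):

    macro_rules! soft_assert {
        ($cond:expr, $($fmt:tt)*) => {{
            let cond = $cond;
            if cfg!(debug_assertions) {
                // Debug builds fail loudly, like a normal assert.
                assert!(cond, $($fmt)*);
            } else if !cond {
                // Release builds only log and keep going.
                eprintln!($($fmt)*);
            }
            cond
        }};
    }

    fn main() {
        let checked = soft_assert!(1 < 2, "spans are not in order");
        assert!(checked);
    }
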
From e18b89452efc8e036ad0f95ff00d8e9e333a5c1a Mon Sep 17 00:00:00 2001
From: Matheus Cardoso 
Date: Fri, 25 Aug 2023 15:47:27 -0300
Subject: [PATCH 62/80] Flip binexpr works for lhs binexpr

---
 .../ide-assists/src/handlers/flip_binexpr.rs   | 18 ++++++++++++++++++
 1 file changed, 18 insertions(+)

diff --git a/crates/ide-assists/src/handlers/flip_binexpr.rs b/crates/ide-assists/src/handlers/flip_binexpr.rs
index 2ea6f58fa0f1e..4b1e4165965c7 100644
--- a/crates/ide-assists/src/handlers/flip_binexpr.rs
+++ b/crates/ide-assists/src/handlers/flip_binexpr.rs
@@ -33,6 +33,15 @@ pub(crate) fn flip_binexpr(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option
         return None;
     }
 
+    // If the lhs is a binary expression we check if its rhs can be used as the lhs of the current expression
+    let lhs = match BinExpr::cast(lhs.clone()) {
+        Some(bin_expr) => match bin_expr.rhs() {
+            Some(rhs) => rhs.syntax().clone(),
+            None => lhs,
+        },
+        None => lhs,
+    };
+
     acc.add(
         AssistId("flip_binexpr", AssistKind::RefactorRewrite),
         "Flip binary expression",
@@ -114,6 +123,15 @@ mod tests {
         )
     }
 
+    #[test]
+    fn flip_binexpr_works_for_lhs_binexpr() {
+        check_assist(
+            flip_binexpr,
+            r"fn f() { let res = 1 + (2 - 3) +$0 4 + 5; }",
+            r"fn f() { let res = 1 + 4 + (2 - 3) + 5; }",
+        )
+    }
+
     #[test]
     fn flip_binexpr_works_inside_match() {
         check_assist(

From 3d9221291fb6630c90d1956c2f56d1bbbac55c93 Mon Sep 17 00:00:00 2001
From: Matheus Cardoso 
Date: Fri, 25 Aug 2023 20:14:35 -0300
Subject: [PATCH 63/80] Flip binexpr works for lhs cmp

---
 .../ide-assists/src/handlers/flip_binexpr.rs  | 33 ++++++++++++-------
 1 file changed, 22 insertions(+), 11 deletions(-)

diff --git a/crates/ide-assists/src/handlers/flip_binexpr.rs b/crates/ide-assists/src/handlers/flip_binexpr.rs
index 4b1e4165965c7..8b46a23f9a64d 100644
--- a/crates/ide-assists/src/handlers/flip_binexpr.rs
+++ b/crates/ide-assists/src/handlers/flip_binexpr.rs
@@ -19,8 +19,19 @@ use crate::{AssistContext, AssistId, AssistKind, Assists};
 // ```
 pub(crate) fn flip_binexpr(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
     let expr = ctx.find_node_at_offset::<BinExpr>()?;
-    let lhs = expr.lhs()?.syntax().clone();
     let rhs = expr.rhs()?.syntax().clone();
+    let lhs = expr.lhs()?.syntax().clone();
+
+    let lhs = if let Some(bin_expr) = BinExpr::cast(lhs.clone()) {
+        if bin_expr.op_kind() == expr.op_kind() {
+            bin_expr.rhs()?.syntax().clone()
+        } else {
+            lhs
+        }
+    } else {
+        lhs
+    };
+
     let op_range = expr.op_token()?.text_range();
     // The assist should be applied only if the cursor is on the operator
     let cursor_in_range = op_range.contains_range(ctx.selection_trimmed());
@@ -33,15 +44,6 @@ pub(crate) fn flip_binexpr(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option
         return None;
     }
 
-    // If the lhs is a binary expression we check if its rhs can be used as the lhs of the current expression
-    let lhs = match BinExpr::cast(lhs.clone()) {
-        Some(bin_expr) => match bin_expr.rhs() {
-            Some(rhs) => rhs.syntax().clone(),
-            None => lhs,
-        },
-        None => lhs,
-    };
-
     acc.add(
         AssistId("flip_binexpr", AssistKind::RefactorRewrite),
         "Flip binary expression",
@@ -124,7 +126,7 @@ mod tests {
     }
 
     #[test]
-    fn flip_binexpr_works_for_lhs_binexpr() {
+    fn flip_binexpr_works_for_lhs_arith() {
         check_assist(
             flip_binexpr,
             r"fn f() { let res = 1 + (2 - 3) +$0 4 + 5; }",
@@ -132,6 +134,15 @@ mod tests {
         )
     }
 
+    #[test]
+    fn flip_binexpr_works_for_lhs_cmp() {
+        check_assist(
+            flip_binexpr,
+            r"fn f() { let res = 1 + (2 - 3) >$0 4 + 5; }",
+            r"fn f() { let res = 4 + 5 < 1 + (2 - 3); }",
+        )
+    }
+
     #[test]
     fn flip_binexpr_works_inside_match() {
         check_assist(
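
Why the lhs needs special handling at all: these operators are
left-associative, so the whole expression to the left of the cursor parses
as the lhs. A hand-worked check of the two test cases above:

    fn main() {
        // `1 + (2 - 3) + 4 + 5` parses as `((1 + (2 - 3)) + 4) + 5`, so
        // flipping at the marked `+` swaps the inner rhs `(2 - 3)` with `4`.
        assert_eq!(1 + (2 - 3) + 4 + 5, 1 + 4 + (2 - 3) + 5);
        // Comparison operators must themselves be mirrored when flipped.
        assert_eq!(1 + (2 - 3) > 4 + 5, 4 + 5 < 1 + (2 - 3));
    }
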

From 2d879e043176dcb683a43e0a90d17126c0bc22d5 Mon Sep 17 00:00:00 2001
From: petr-tik 
Date: Sun, 20 Aug 2023 18:31:25 +0100
Subject: [PATCH 64/80] Stop offering private functions in completions

Before:
Private functions have module-level RawVisibility, but were
missed because take_types returned None early. After resolve_visibility
returned None, Visibility::Public was assumed instead, so private
functions ended up being offered in autocompletion.

Choosing such a function results in an immediate error diagnostic
about using a private function.

After:
Pattern match on take_types returning None and query the
module-level visibility from original_module instead.

Fixes #15134 - tested with a unit test and a manual end-to-end
test of building rust-analyzer from this branch and opening
the reproduction repository.

REVIEW
Refactored to move scope_def_applicable out and to check function
visibility from a module.

Please let me know the best way to add unit tests to
nameres, which is where the root cause was.
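
A minimal reproduction of the reported behaviour (single-module shape
assumed here; the new test below splits it across crates):

    mod e {
        pub(self) fn i_should_be_hidden() {}
        pub fn i_am_public() {}
    }

    fn main() {
        // Completing `e::` must only offer `i_am_public`; before this
        // fix the private function was offered too, and accepting it
        // produced an immediate E0603 "function is private" error.
        e::i_am_public();
    }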
---
 crates/hir-def/src/nameres/path_resolution.rs | 14 +++++----
 crates/ide-completion/src/completions/expr.rs | 29 +++++++++++++------
 crates/ide-completion/src/tests/special.rs    | 24 +++++++++++++++
 3 files changed, 53 insertions(+), 14 deletions(-)

diff --git a/crates/hir-def/src/nameres/path_resolution.rs b/crates/hir-def/src/nameres/path_resolution.rs
index 4c1b8f306c50f..7c3231913d320 100644
--- a/crates/hir-def/src/nameres/path_resolution.rs
+++ b/crates/hir-def/src/nameres/path_resolution.rs
@@ -93,11 +93,15 @@ impl DefMap {
                 if remaining.is_some() {
                     return None;
                 }
-                let types = result.take_types()?;
-                match types {
-                    ModuleDefId::ModuleId(m) => Visibility::Module(m),
-                    _ => {
-                        // error: visibility needs to refer to module
+                let types = result.take_types();
+
+                match (types, path.kind) {
+                    (Some(ModuleDefId::ModuleId(m)), _) => Visibility::Module(m),
+                    // resolve_path doesn't find any values for a plain path kind of a private function
+                    (None, PathKind::Plain | PathKind::Crate) => {
+                        Visibility::Module(self.module_id(original_module))
+                    }
+                    (_, _) => {
                         return None;
                     }
                 }
diff --git a/crates/ide-completion/src/completions/expr.rs b/crates/ide-completion/src/completions/expr.rs
index d3c817d4b43ad..17a52787b8a88 100644
--- a/crates/ide-completion/src/completions/expr.rs
+++ b/crates/ide-completion/src/completions/expr.rs
@@ -1,6 +1,6 @@
 //! Completion of names from the current scope in expression position.
 
-use hir::ScopeDef;
+use hir::{HasVisibility, Module, ScopeDef};
 use syntax::ast;
 
 use crate::{
@@ -9,6 +9,23 @@ use crate::{
     CompletionContext, Completions,
 };
 
+fn scope_def_applicable(
+    def: ScopeDef,
+    ctx: &CompletionContext<'_>,
+    module: Option<&Module>,
+) -> bool {
+    match (def, module) {
+        (ScopeDef::GenericParam(hir::GenericParam::LifetimeParam(_)) | ScopeDef::Label(_), _) => {
+            false
+        }
+        (ScopeDef::ModuleDef(hir::ModuleDef::Macro(mac)), _) => mac.is_fn_like(ctx.db),
+        (ScopeDef::ModuleDef(hir::ModuleDef::Function(f)), Some(m)) => {
+            f.is_visible_from(ctx.db, *m)
+        }
+        _ => true,
+    }
+}
+
 pub(crate) fn complete_expr_path(
     acc: &mut Completions,
     ctx: &CompletionContext<'_>,
@@ -37,12 +54,6 @@ pub(crate) fn complete_expr_path(
     let wants_mut_token =
         ref_expr_parent.as_ref().map(|it| it.mut_token().is_none()).unwrap_or(false);
 
-    let scope_def_applicable = |def| match def {
-        ScopeDef::GenericParam(hir::GenericParam::LifetimeParam(_)) | ScopeDef::Label(_) => false,
-        ScopeDef::ModuleDef(hir::ModuleDef::Macro(mac)) => mac.is_fn_like(ctx.db),
-        _ => true,
-    };
-
     let add_assoc_item = |acc: &mut Completions, item| match item {
         hir::AssocItem::Function(func) => acc.add_function(ctx, path_ctx, func, None),
         hir::AssocItem::Const(ct) => acc.add_const(ctx, ct),
@@ -87,7 +98,7 @@ pub(crate) fn complete_expr_path(
                 hir::PathResolution::Def(hir::ModuleDef::Module(module)) => {
                     let module_scope = module.scope(ctx.db, Some(ctx.module));
                     for (name, def) in module_scope {
-                        if scope_def_applicable(def) {
+                        if scope_def_applicable(def, ctx, Some(module)) {
                             acc.add_path_resolution(
                                 ctx,
                                 path_ctx,
@@ -233,7 +244,7 @@ pub(crate) fn complete_expr_path(
                         [..] => acc.add_path_resolution(ctx, path_ctx, name, def, doc_aliases),
                     }
                 }
-                _ if scope_def_applicable(def) => {
+                _ if scope_def_applicable(def, ctx, None) => {
                     acc.add_path_resolution(ctx, path_ctx, name, def, doc_aliases)
                 }
                 _ => (),
diff --git a/crates/ide-completion/src/tests/special.rs b/crates/ide-completion/src/tests/special.rs
index d3dbd7cc22777..8d82b5f02bb8f 100644
--- a/crates/ide-completion/src/tests/special.rs
+++ b/crates/ide-completion/src/tests/special.rs
@@ -1286,6 +1286,30 @@ fn here_we_go() {
     );
 }
 
+#[test]
+fn completes_only_public() {
+    check(
+        r#"
+//- /e.rs
+pub(self) fn i_should_be_hidden() {}
+pub(in crate::krate) fn i_should_also_be_hidden() {}
+pub fn i_am_public () {}
+
+//- /lib.rs crate:krate
+pub mod e;
+
+//- /main.rs deps:krate crate:main
+use krate::e;
+fn main() {
+    e::$0
+}"#,
+        expect![
+            "fn i_am_public() fn()
+"
+        ],
+    )
+}
+
 #[test]
 fn completion_filtering_excludes_non_identifier_doc_aliases() {
     check_edit(

From d54745aed3a747a907e2fea3528f98c4d4701345 Mon Sep 17 00:00:00 2001
From: Lukas Wirth 
Date: Fri, 8 Dec 2023 11:59:44 +0100
Subject: [PATCH 65/80] fix: Fix item tree lowering pub(self) to pub()

---
 crates/hir-def/src/item_tree/tests.rs         | 12 ++++++++
 crates/hir-def/src/nameres/path_resolution.rs | 14 ++++-----
 crates/hir-def/src/visibility.rs              |  2 +-
 crates/ide-completion/src/completions/expr.rs | 29 ++++++-------------
 crates/ide-completion/src/tests/special.rs    |  2 +-
 5 files changed, 28 insertions(+), 31 deletions(-)

diff --git a/crates/hir-def/src/item_tree/tests.rs b/crates/hir-def/src/item_tree/tests.rs
index 4180f817209e8..96c65b941c1d0 100644
--- a/crates/hir-def/src/item_tree/tests.rs
+++ b/crates/hir-def/src/item_tree/tests.rs
@@ -370,3 +370,15 @@ struct S<#[cfg(never)] T>;
         "#]],
     )
 }
+
+#[test]
+fn pub_self() {
+    check(
+        r#"
+pub(self) struct S;
+        "#,
+        expect![[r#"
+            pub(self) struct S;
+        "#]],
+    )
+}
diff --git a/crates/hir-def/src/nameres/path_resolution.rs b/crates/hir-def/src/nameres/path_resolution.rs
index 7c3231913d320..be3438e427dba 100644
--- a/crates/hir-def/src/nameres/path_resolution.rs
+++ b/crates/hir-def/src/nameres/path_resolution.rs
@@ -93,15 +93,11 @@ impl DefMap {
                 if remaining.is_some() {
                     return None;
                 }
-                let types = result.take_types();
-
-                match (types, path.kind) {
-                    (Some(ModuleDefId::ModuleId(m)), _) => Visibility::Module(m),
-                    // resolve_path doesn't find any values for a plain path kind of a private function
-                    (None, PathKind::Plain | PathKind::Crate) => {
-                        Visibility::Module(self.module_id(original_module))
-                    }
-                    (_, _) => {
+                let types = result.take_types()?;
+                match types {
+                    ModuleDefId::ModuleId(m) => Visibility::Module(m),
+                    // error: visibility needs to refer to module
+                    _ => {
                         return None;
                     }
                 }
diff --git a/crates/hir-def/src/visibility.rs b/crates/hir-def/src/visibility.rs
index ab9266aa60f1a..f5803653c73be 100644
--- a/crates/hir-def/src/visibility.rs
+++ b/crates/hir-def/src/visibility.rs
@@ -73,7 +73,7 @@ impl RawVisibility {
                 RawVisibility::Module(path)
             }
             ast::VisibilityKind::PubSelf => {
-                let path = ModPath::from_kind(PathKind::Plain);
+                let path = ModPath::from_kind(PathKind::Super(0));
                 RawVisibility::Module(path)
             }
             ast::VisibilityKind::Pub => RawVisibility::Public,
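
For reference, `pub(self)` restricts an item to its defining module, which
is exactly what a lowered `self` path (PathKind::Super(0), i.e. zero
`super` segments) resolves to; a quick sanity check:

    mod m {
        pub(self) struct S;
        pub fn touch() { let _ = S; } // fine: same module
    }

    fn main() {
        m::touch();
        // let _ = m::S; // error[E0603]: unit struct `S` is private
    }
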
diff --git a/crates/ide-completion/src/completions/expr.rs b/crates/ide-completion/src/completions/expr.rs
index 17a52787b8a88..d3c817d4b43ad 100644
--- a/crates/ide-completion/src/completions/expr.rs
+++ b/crates/ide-completion/src/completions/expr.rs
@@ -1,6 +1,6 @@
 //! Completion of names from the current scope in expression position.
 
-use hir::{HasVisibility, Module, ScopeDef};
+use hir::ScopeDef;
 use syntax::ast;
 
 use crate::{
@@ -9,23 +9,6 @@ use crate::{
     CompletionContext, Completions,
 };
 
-fn scope_def_applicable(
-    def: ScopeDef,
-    ctx: &CompletionContext<'_>,
-    module: Option<&Module>,
-) -> bool {
-    match (def, module) {
-        (ScopeDef::GenericParam(hir::GenericParam::LifetimeParam(_)) | ScopeDef::Label(_), _) => {
-            false
-        }
-        (ScopeDef::ModuleDef(hir::ModuleDef::Macro(mac)), _) => mac.is_fn_like(ctx.db),
-        (ScopeDef::ModuleDef(hir::ModuleDef::Function(f)), Some(m)) => {
-            f.is_visible_from(ctx.db, *m)
-        }
-        _ => true,
-    }
-}
-
 pub(crate) fn complete_expr_path(
     acc: &mut Completions,
     ctx: &CompletionContext<'_>,
@@ -54,6 +37,12 @@ pub(crate) fn complete_expr_path(
     let wants_mut_token =
         ref_expr_parent.as_ref().map(|it| it.mut_token().is_none()).unwrap_or(false);
 
+    let scope_def_applicable = |def| match def {
+        ScopeDef::GenericParam(hir::GenericParam::LifetimeParam(_)) | ScopeDef::Label(_) => false,
+        ScopeDef::ModuleDef(hir::ModuleDef::Macro(mac)) => mac.is_fn_like(ctx.db),
+        _ => true,
+    };
+
     let add_assoc_item = |acc: &mut Completions, item| match item {
         hir::AssocItem::Function(func) => acc.add_function(ctx, path_ctx, func, None),
         hir::AssocItem::Const(ct) => acc.add_const(ctx, ct),
@@ -98,7 +87,7 @@ pub(crate) fn complete_expr_path(
                 hir::PathResolution::Def(hir::ModuleDef::Module(module)) => {
                     let module_scope = module.scope(ctx.db, Some(ctx.module));
                     for (name, def) in module_scope {
-                        if scope_def_applicable(def, ctx, Some(module)) {
+                        if scope_def_applicable(def) {
                             acc.add_path_resolution(
                                 ctx,
                                 path_ctx,
@@ -244,7 +233,7 @@ pub(crate) fn complete_expr_path(
                         [..] => acc.add_path_resolution(ctx, path_ctx, name, def, doc_aliases),
                     }
                 }
-                _ if scope_def_applicable(def, ctx, None) => {
+                _ if scope_def_applicable(def) => {
                     acc.add_path_resolution(ctx, path_ctx, name, def, doc_aliases)
                 }
                 _ => (),
diff --git a/crates/ide-completion/src/tests/special.rs b/crates/ide-completion/src/tests/special.rs
index 8d82b5f02bb8f..28c9bffc5ecaf 100644
--- a/crates/ide-completion/src/tests/special.rs
+++ b/crates/ide-completion/src/tests/special.rs
@@ -1292,7 +1292,7 @@ fn completes_only_public() {
         r#"
 //- /e.rs
 pub(self) fn i_should_be_hidden() {}
-pub(in crate::krate) fn i_should_also_be_hidden() {}
+pub(in crate::e) fn i_should_also_be_hidden() {}
 pub fn i_am_public () {}
 
 //- /lib.rs crate:krate

From 934358e95cd2c8190396ba4d7119e5c3dc3b2ead Mon Sep 17 00:00:00 2001
From: Ryan Mehri 
Date: Mon, 2 Oct 2023 22:04:59 -0700
Subject: [PATCH 66/80] fix: resolve Self type references in delegate method
 assist

---
 .../src/handlers/generate_delegate_methods.rs | 168 +++++++++++++++++-
 crates/ide-db/src/path_transform.rs           |  32 +++-
 2 files changed, 197 insertions(+), 3 deletions(-)

diff --git a/crates/ide-assists/src/handlers/generate_delegate_methods.rs b/crates/ide-assists/src/handlers/generate_delegate_methods.rs
index bbac0a26ea4ca..92fbdf53f6b78 100644
--- a/crates/ide-assists/src/handlers/generate_delegate_methods.rs
+++ b/crates/ide-assists/src/handlers/generate_delegate_methods.rs
@@ -1,6 +1,7 @@
 use std::collections::HashSet;
 
 use hir::{self, HasCrate, HasSource, HasVisibility};
+use ide_db::path_transform::PathTransform;
 use syntax::{
     ast::{
         self, edit_in_place::Indent, make, AstNode, HasGenericParams, HasName, HasVisibility as _,
@@ -106,7 +107,10 @@ pub(crate) fn generate_delegate_methods(acc: &mut Assists, ctx: &AssistContext<'
             |edit| {
                 // Create the function
                 let method_source = match method.source(ctx.db()) {
-                    Some(source) => source.value,
+                    Some(source) => {
+                        ctx.sema.parse_or_expand(source.file_id);
+                        source.value
+                    }
                     None => return,
                 };
                 let vis = method_source.visibility();
@@ -183,6 +187,12 @@ pub(crate) fn generate_delegate_methods(acc: &mut Assists, ctx: &AssistContext<'
                 let assoc_items = impl_def.get_or_create_assoc_item_list();
                 assoc_items.add_item(f.clone().into());
 
+                PathTransform::generic_transformation(
+                    &ctx.sema.scope(strukt.syntax()).unwrap(),
+                    &ctx.sema.scope(method_source.syntax()).unwrap(),
+                )
+                .apply(f.syntax());
+
                 if let Some(cap) = ctx.config.snippet_cap {
                     edit.add_tabstop_before(cap, f)
                 }
@@ -454,6 +464,162 @@ impl Person {
         );
     }
 
+    #[test]
+    fn test_fixes_basic_self_references() {
+        check_assist(
+            generate_delegate_methods,
+            r#"
+struct Foo {
+    field: $0Bar,
+}
+
+struct Bar;
+
+impl Bar {
+    fn bar(&self, other: Self) -> Self {
+        other
+    }
+}
+"#,
+            r#"
+struct Foo {
+    field: Bar,
+}
+
+impl Foo {
+    $0fn bar(&self, other: Bar) -> Bar {
+        self.field.bar(other)
+    }
+}
+
+struct Bar;
+
+impl Bar {
+    fn bar(&self, other: Self) -> Self {
+        other
+    }
+}
+"#,
+        );
+    }
+
+    #[test]
+    fn test_fixes_nested_self_references() {
+        check_assist(
+            generate_delegate_methods,
+            r#"
+struct Foo {
+    field: $0Bar,
+}
+
+struct Bar;
+
+impl Bar {
+    fn bar(&mut self, a: (Self, [Self; 4]), b: Vec<Self>) {}
+}
+"#,
+            r#"
+struct Foo {
+    field: Bar,
+}
+
+impl Foo {
+    $0fn bar(&mut self, a: (Bar, [Bar; 4]), b: Vec<Bar>) {
+        self.field.bar(a, b)
+    }
+}
+
+struct Bar;
+
+impl Bar {
+    fn bar(&mut self, a: (Self, [Self; 4]), b: Vec<Self>) {}
+}
+"#,
+        );
+    }
+
+    #[test]
+    fn test_fixes_self_references_with_lifetimes_and_generics() {
+        check_assist(
+            generate_delegate_methods,
+            r#"
+struct Foo<'a, T> {
+    $0field: Bar<'a, T>,
+}
+
+struct Bar<'a, T>(&'a T);
+
+impl<'a, T> Bar<'a, T> {
+    fn bar(self, mut b: Vec<&'a Self>) -> &'a Self {
+        b.pop().unwrap()
+    }
+}
+"#,
+            r#"
+struct Foo<'a, T> {
+    field: Bar<'a, T>,
+}
+
+impl<'a, T> Foo<'a, T> {
+    $0fn bar(self, mut b: Vec<&'a Bar<'_, T>>) -> &'a Bar<'_, T> {
+        self.field.bar(b)
+    }
+}
+
+struct Bar<'a, T>(&'a T);
+
+impl<'a, T> Bar<'a, T> {
+    fn bar(self, mut b: Vec<&'a Self>) -> &'a Self {
+        b.pop().unwrap()
+    }
+}
+"#,
+        );
+    }
+
+    #[test]
+    fn test_fixes_self_references_across_macros() {
+        check_assist(
+            generate_delegate_methods,
+            r#"
+//- /bar.rs
+macro_rules! test_method {
+    () => {
+        pub fn test(self, b: Bar) -> Self {
+            self
+        }
+    };
+}
+
+pub struct Bar;
+
+impl Bar {
+    test_method!();
+}
+
+//- /main.rs
+mod bar;
+
+struct Foo {
+    $0bar: bar::Bar,
+}
+"#,
+            r#"
+mod bar;
+
+struct Foo {
+    bar: bar::Bar,
+}
+
+impl Foo {
+    $0pub fn test(self,b:bar::Bar) ->bar::Bar {
+        self.bar.test(b)
+    }
+}
+"#,
+        );
+    }
+
     #[test]
     fn test_generate_delegate_visibility() {
         check_assist_not_applicable(
diff --git a/crates/ide-db/src/path_transform.rs b/crates/ide-db/src/path_transform.rs
index fa9339f30f206..49b990172a541 100644
--- a/crates/ide-db/src/path_transform.rs
+++ b/crates/ide-db/src/path_transform.rs
@@ -2,7 +2,7 @@
 
 use crate::helpers::mod_path_to_ast;
 use either::Either;
-use hir::{AsAssocItem, HirDisplay, SemanticsScope};
+use hir::{AsAssocItem, HirDisplay, ModuleDef, SemanticsScope};
 use rustc_hash::FxHashMap;
 use syntax::{
     ast::{self, make, AstNode},
@@ -332,8 +332,36 @@ impl Ctx<'_> {
                     ted::replace(path.syntax(), subst.clone_subtree().clone_for_update());
                 }
             }
+            hir::PathResolution::SelfType(imp) => {
+                let ty = imp.self_ty(self.source_scope.db);
+                let ty_str = &ty
+                    .display_source_code(
+                        self.source_scope.db,
+                        self.source_scope.module().into(),
+                        true,
+                    )
+                    .ok()?;
+                let ast_ty = make::ty(&ty_str).clone_for_update();
+
+                if let Some(adt) = ty.as_adt() {
+                    if let ast::Type::PathType(path_ty) = &ast_ty {
+                        let found_path = self.target_module.find_use_path(
+                            self.source_scope.db.upcast(),
+                            ModuleDef::from(adt),
+                            false,
+                        )?;
+
+                        if let Some(qual) = mod_path_to_ast(&found_path).qualifier() {
+                            let res = make::path_concat(qual, path_ty.path()?).clone_for_update();
+                            ted::replace(path.syntax(), res.syntax());
+                            return Some(());
+                        }
+                    }
+                }
+
+                ted::replace(path.syntax(), ast_ty.syntax());
+            }
             hir::PathResolution::Local(_)
-            | hir::PathResolution::SelfType(_)
             | hir::PathResolution::Def(_)
             | hir::PathResolution::BuiltinAttr(_)
             | hir::PathResolution::ToolModule(_)

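What the new SelfType arm fixes, in miniature: when the assist copies a
method whose signature mentions `Self` into the delegating type's impl,
`Self` would silently change meaning, so the transform rewrites it to the
concrete type (the `Bar`s in the generated impl of the tests above are
what the new code produces):

    struct Bar;
    impl Bar {
        fn bar(&self, other: Self) -> Self { other } // `Self` is `Bar` here
    }

    struct Foo { field: Bar }
    impl Foo {
        // Copied verbatim, `Self` would now mean `Foo`; the path transform
        // rewrites it to `Bar` so the delegation still type-checks.
        fn bar(&self, other: Bar) -> Bar { self.field.bar(other) }
    }

    fn main() {
        let _ = Foo { field: Bar }.bar(Bar);
    }
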
From f4349ff26ef0736ed6e37a7e491f01ee4c6a76a7 Mon Sep 17 00:00:00 2001
From: Ryan Mehri 
Date: Thu, 5 Oct 2023 12:45:26 -0700
Subject: [PATCH 67/80] fix: preserve where clause in delegate method

---
 .../src/handlers/generate_delegate_methods.rs | 49 ++++++++++++++++++-
 1 file changed, 48 insertions(+), 1 deletion(-)

diff --git a/crates/ide-assists/src/handlers/generate_delegate_methods.rs b/crates/ide-assists/src/handlers/generate_delegate_methods.rs
index 92fbdf53f6b78..7a5d3d0859c6e 100644
--- a/crates/ide-assists/src/handlers/generate_delegate_methods.rs
+++ b/crates/ide-assists/src/handlers/generate_delegate_methods.rs
@@ -134,7 +134,7 @@ pub(crate) fn generate_delegate_methods(acc: &mut Assists, ctx: &AssistContext<'
                     vis,
                     fn_name,
                     type_params,
-                    None,
+                    method_source.where_clause(),
                     params,
                     body,
                     ret_type,
@@ -464,6 +464,53 @@ impl Person {
         );
     }
 
+    #[test]
+    fn test_preserve_where_clause() {
+        check_assist(
+            generate_delegate_methods,
+            r#"
+struct Inner<T>(T);
+impl<T> Inner<T> {
+    fn get(&self) -> T
+    where
+        T: Copy,
+        T: PartialEq,
+    {
+        self.0
+    }
+}
+
+struct Struct<T> {
+    $0field: Inner<T>,
+}
+"#,
+            r#"
+struct Inner<T>(T);
+impl<T> Inner<T> {
+    fn get(&self) -> T
+    where
+        T: Copy,
+        T: PartialEq,
+    {
+        self.0
+    }
+}
+
+struct Struct<T> {
+    field: Inner<T>,
+}
+
+impl<T> Struct<T> {
+    $0fn get(&self) -> T where
+            T: Copy,
+            T: PartialEq, {
+        self.field.get()
+    }
+}
+"#,
+        );
+    }
+
     #[test]
     fn test_fixes_basic_self_references() {
         check_assist(

From 7e768cbe703188bb8c126d1be6960637795e4943 Mon Sep 17 00:00:00 2001
From: Ryan Mehri 
Date: Mon, 9 Oct 2023 08:30:34 -0700
Subject: [PATCH 68/80] fix: prefer keeping Self if it is in the same impl def

---
 crates/hir-def/src/resolver.rs                |  8 ++++++++
 crates/hir/src/semantics.rs                   |  4 ++++
 .../src/handlers/generate_delegate_methods.rs | 19 ++++++++-----------
 crates/ide-db/src/path_transform.rs           |  8 ++++++++
 4 files changed, 28 insertions(+), 11 deletions(-)

diff --git a/crates/hir-def/src/resolver.rs b/crates/hir-def/src/resolver.rs
index 50da9ed06a0d9..ba0a2c0224a05 100644
--- a/crates/hir-def/src/resolver.rs
+++ b/crates/hir-def/src/resolver.rs
@@ -588,6 +588,14 @@ impl Resolver {
             _ => None,
         })
     }
+
+    pub fn impl_def(&self) -> Option<ImplId> {
+        self.scopes().find_map(|scope| match scope {
+            Scope::ImplDefScope(def) => Some(*def),
+            _ => None,
+        })
+    }
+
     /// `expr_id` is required to be an expression id that comes after the top level expression scope in the given resolver
     #[must_use]
     pub fn update_to_inner_scope(
diff --git a/crates/hir/src/semantics.rs b/crates/hir/src/semantics.rs
index 46835ec04e01b..55c14312071a3 100644
--- a/crates/hir/src/semantics.rs
+++ b/crates/hir/src/semantics.rs
@@ -1556,6 +1556,10 @@ impl SemanticsScope<'_> {
     pub fn extern_crate_decls(&self) -> impl Iterator<Item = Name> + '_ {
         self.resolver.extern_crate_decls_in_scope(self.db.upcast())
     }
+
+    pub fn has_same_self_type(&self, other: &SemanticsScope<'_>) -> bool {
+        self.resolver.impl_def() == other.resolver.impl_def()
+    }
 }
 
 #[derive(Debug)]
diff --git a/crates/ide-assists/src/handlers/generate_delegate_methods.rs b/crates/ide-assists/src/handlers/generate_delegate_methods.rs
index 7a5d3d0859c6e..db1e0ceaec1e7 100644
--- a/crates/ide-assists/src/handlers/generate_delegate_methods.rs
+++ b/crates/ide-assists/src/handlers/generate_delegate_methods.rs
@@ -1,6 +1,6 @@
 use std::collections::HashSet;
 
-use hir::{self, HasCrate, HasSource, HasVisibility};
+use hir::{self, HasCrate, HasVisibility};
 use ide_db::path_transform::PathTransform;
 use syntax::{
     ast::{
@@ -106,11 +106,8 @@ pub(crate) fn generate_delegate_methods(acc: &mut Assists, ctx: &AssistContext<'
             target,
             |edit| {
                 // Create the function
-                let method_source = match method.source(ctx.db()) {
-                    Some(source) => {
-                        ctx.sema.parse_or_expand(source.file_id);
-                        source.value
-                    }
+                let method_source = match ctx.sema.source(method) {
+                    Some(source) => source.value,
                     None => return,
                 };
                 let vis = method_source.visibility();
@@ -187,11 +184,11 @@ pub(crate) fn generate_delegate_methods(acc: &mut Assists, ctx: &AssistContext<'
                 let assoc_items = impl_def.get_or_create_assoc_item_list();
                 assoc_items.add_item(f.clone().into());
 
-                PathTransform::generic_transformation(
-                    &ctx.sema.scope(strukt.syntax()).unwrap(),
-                    &ctx.sema.scope(method_source.syntax()).unwrap(),
-                )
-                .apply(f.syntax());
+                if let Some((target, source)) =
+                    ctx.sema.scope(strukt.syntax()).zip(ctx.sema.scope(method_source.syntax()))
+                {
+                    PathTransform::generic_transformation(&target, &source).apply(f.syntax());
+                }
 
                 if let Some(cap) = ctx.config.snippet_cap {
                     edit.add_tabstop_before(cap, f)
diff --git a/crates/ide-db/src/path_transform.rs b/crates/ide-db/src/path_transform.rs
index 49b990172a541..fb4c0c12691db 100644
--- a/crates/ide-db/src/path_transform.rs
+++ b/crates/ide-db/src/path_transform.rs
@@ -183,6 +183,7 @@ impl<'a> PathTransform<'a> {
             lifetime_substs,
             target_module,
             source_scope: self.source_scope,
+            same_self_type: self.target_scope.has_same_self_type(self.source_scope),
         };
         ctx.transform_default_values(defaulted_params);
         ctx
@@ -195,6 +196,7 @@ struct Ctx<'a> {
     lifetime_substs: FxHashMap<hir::LifetimeParam, ast::Lifetime>,
     target_module: hir::Module,
     source_scope: &'a SemanticsScope<'a>,
+    same_self_type: bool,
 }
 
 fn postorder(item: &SyntaxNode) -> impl Iterator<Item = SyntaxNode> {
@@ -333,6 +335,11 @@ impl Ctx<'_> {
                 }
             }
             hir::PathResolution::SelfType(imp) => {
+                // keep Self type if it does not need to be replaced
+                if self.same_self_type {
+                    return None;
+                }
+
                 let ty = imp.self_ty(self.source_scope.db);
                 let ty_str = &ty
                     .display_source_code(
@@ -349,6 +356,7 @@ impl Ctx<'_> {
                             self.source_scope.db.upcast(),
                             ModuleDef::from(adt),
                             false,
+                            true,
                         )?;
 
                         if let Some(qual) = mod_path_to_ast(&found_path).qualifier() {

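The same_self_type flag added here short-circuits that rewrite when the
source and target scopes share an impl: there `Self` already names the
right type and should survive untouched, e.g.:

    struct S;
    impl S {
        fn a(&self) -> Self { S }
        // Generated next to `a` in the same impl: keeping `Self` is both
        // valid and closer to what a user would write by hand.
        fn b(&self) -> Self { self.a() }
    }

    fn main() {
        let _ = S.b();
    }
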
From 14a7a614c177161243c7eaf005a12521d8e10244 Mon Sep 17 00:00:00 2001
From: Jessie Chatham Spencer 
Date: Wed, 13 Sep 2023 04:42:59 +0000
Subject: [PATCH 69/80] WIP - Sort suggested imports by type for data types

---
 crates/hir/src/lib.rs                        |   4 +-
 crates/ide-completion/src/render.rs          | 369 ++++++++++++++++++-
 crates/ide-completion/src/render/function.rs |  55 ++-
 3 files changed, 418 insertions(+), 10 deletions(-)

diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs
index 5137bff055a4b..2ec382be6e331 100644
--- a/crates/hir/src/lib.rs
+++ b/crates/hir/src/lib.rs
@@ -4567,8 +4567,8 @@ impl Type {
 // FIXME: Document this
 #[derive(Debug)]
 pub struct Callable {
-    ty: Type,
-    sig: CallableSig,
+    pub ty: Type,
+    pub sig: CallableSig,
     callee: Callee,
     /// Whether this is a method that was called with method call syntax.
     pub(crate) is_bound_method: bool,
diff --git a/crates/ide-completion/src/render.rs b/crates/ide-completion/src/render.rs
index 830d7cabab585..6f5d211188445 100644
--- a/crates/ide-completion/src/render.rs
+++ b/crates/ide-completion/src/render.rs
@@ -10,7 +10,9 @@ pub(crate) mod variant;
 pub(crate) mod union_literal;
 pub(crate) mod literal;
 
-use hir::{AsAssocItem, HasAttrs, HirDisplay, ScopeDef};
+use core::panic;
+
+use hir::{AsAssocItem, HasAttrs, HirDisplay, ModuleDef, ScopeDef, Type};
 use ide_db::{
     documentation::{Documentation, HasDocs},
     helpers::item_name,
@@ -340,6 +342,7 @@ fn render_resolution_path(
     let cap = ctx.snippet_cap();
     let db = completion.db;
     let config = completion.config;
+    let requires_import = import_to_add.is_some();
 
     let name = local_name.to_smol_str();
     let mut item = render_resolution_simple_(ctx, &local_name, import_to_add, resolution);
@@ -370,8 +373,8 @@ fn render_resolution_path(
             }
         }
     }
-    if let ScopeDef::Local(local) = resolution {
-        let ty = local.ty(db);
+
+    let mut set_item_relevance = |ty: Type| {
         if !ty.is_unknown() {
             item.detail(ty.display(db).to_string());
         }
@@ -379,12 +382,40 @@ fn render_resolution_path(
         item.set_relevance(CompletionRelevance {
             type_match: compute_type_match(completion, &ty),
             exact_name_match: compute_exact_name_match(completion, &name),
-            is_local: true,
+            is_local: matches!(resolution, ScopeDef::Local(_)),
+            requires_import,
             ..CompletionRelevance::default()
         });
 
         path_ref_match(completion, path_ctx, &ty, &mut item);
     };
+
+    match resolution {
+        ScopeDef::Local(local) => set_item_relevance(local.ty(db)),
+        ScopeDef::ModuleDef(ModuleDef::Adt(adt)) | ScopeDef::AdtSelfType(adt) => {
+            set_item_relevance(adt.ty(db))
+        }
+        ScopeDef::ModuleDef(ModuleDef::Function(func)) => {
+            set_item_relevance(func.ty(db).as_callable(db).unwrap().ty)
+        }
+        ScopeDef::ModuleDef(ModuleDef::Variant(variant)) => {
+            set_item_relevance(variant.parent_enum(db).ty(db))
+        }
+        ScopeDef::ModuleDef(ModuleDef::Const(konst)) => set_item_relevance(konst.ty(db)),
+        ScopeDef::ModuleDef(ModuleDef::Static(stat)) => set_item_relevance(stat.ty(db)),
+        ScopeDef::ModuleDef(ModuleDef::BuiltinType(bt)) => set_item_relevance(bt.ty(db)),
+        ScopeDef::ImplSelfType(imp) => set_item_relevance(imp.self_ty(db)),
+
+        ScopeDef::GenericParam(_)
+        | ScopeDef::Label(_)
+        | ScopeDef::Unknown
+        | ScopeDef::ModuleDef(ModuleDef::Trait(_))
+        | ScopeDef::ModuleDef(ModuleDef::TraitAlias(_))
+        | ScopeDef::ModuleDef(ModuleDef::Macro(_))
+        | ScopeDef::ModuleDef(ModuleDef::Module(_))
+        | ScopeDef::ModuleDef(ModuleDef::TypeAlias(_)) => (),
+    };
+
     item
 }
 
@@ -492,6 +523,28 @@ fn compute_type_match(
     }
 }
 
+fn compute_type_match2(
+    ctx: &CompletionContext<'_>,
+    completion_ty1: &hir::Type,
+    completion_ty2: &hir::Type,
+) -> Option<CompletionRelevanceTypeMatch> {
+    let expected_type = completion_ty1;
+
+    // We don't ever consider unit type to be an exact type match, since
+    // nearly always this is not meaningful to the user.
+    if expected_type.is_unit() {
+        return None;
+    }
+
+    if completion_ty2 == expected_type {
+        Some(CompletionRelevanceTypeMatch::Exact)
+    } else if expected_type.could_unify_with(ctx.db, completion_ty2) {
+        Some(CompletionRelevanceTypeMatch::CouldUnify)
+    } else {
+        None
+    }
+}
+
 fn compute_exact_name_match(ctx: &CompletionContext<'_>, completion_name: &str) -> bool {
     ctx.expected_name.as_ref().map_or(false, |name| name.text() == completion_name)
 }
@@ -635,6 +688,314 @@ mod tests {
         }
     }
 
+    #[test]
+    fn set_struct_type_completion_info() {
+        check_relevance(
+            r#"
+//- /lib.rs crate:dep
+
+pub mod test_mod_b {
+    pub struct Struct {}
+}
+
+pub mod test_mod_a {
+    pub struct Struct {}
+}
+
+//- /main.rs crate:main deps:dep
+
+fn test(input: dep::test_mod_b::Struct) { }
+
+fn main() {
+    test(Struct$0);
+}
+"#,
+            expect![[r#"
+                st dep::test_mod_b::Struct {…} [type_could_unify]
+                st Struct (use dep::test_mod_b::Struct) [type_could_unify+requires_import]
+                fn main() []
+                fn test(…) []
+                md dep []
+                st Struct (use dep::test_mod_a::Struct) [requires_import]
+            "#]],
+        );
+    }
+
+    #[test]
+    fn set_union_type_completion_info() {
+        check_relevance(
+            r#"
+//- /lib.rs crate:dep
+
+pub mod test_mod_b {
+    pub union Union {
+            a: i32,
+            b: i32
+            }
+}
+
+pub mod test_mod_a {
+    pub enum Union {
+                a: i32,
+                b: i32
+            }
+}
+
+//- /main.rs crate:main deps:dep
+
+fn test(input: dep::test_mod_b::Union) { }
+
+fn main() {
+    test(Union$0);
+}
+"#,
+            expect![[r#"
+                un Union (use dep::test_mod_b::Union) [type_could_unify+requires_import]
+                fn main() []
+                fn test(…) []
+                md dep []
+                en Union (use dep::test_mod_a::Union) [requires_import]
+            "#]],
+        );
+    }
+
+    #[test]
+    fn set_enum_type_completion_info() {
+        check_relevance(
+            r#"
+//- /lib.rs crate:dep
+
+pub mod test_mod_b {
+    pub enum Enum {
+                variant
+            }
+}
+
+pub mod test_mod_a {
+    pub enum Enum {
+                variant
+            }
+}
+
+//- /main.rs crate:main deps:dep
+
+fn test(input: dep::test_mod_b::Enum) { }
+
+fn main() {
+    test(Enum$0);
+}
+"#,
+            expect![[r#"
+                ev dep::test_mod_b::Enum::variant [type_could_unify]
+                en Enum (use dep::test_mod_b::Enum) [type_could_unify+requires_import]
+                fn main() []
+                fn test(…) []
+                md dep []
+                en Enum (use dep::test_mod_a::Enum) [requires_import]
+            "#]],
+        );
+    }
+
+    // TODO: does this test even make sense?
+    #[test]
+    fn set_enum_variant_type_completion_info() {
+        check_relevance(
+            r#"
+//- /lib.rs crate:dep
+
+pub mod test_mod_b {
+    pub enum Enum {
+                Variant
+            }
+}
+
+pub mod test_mod_a {
+    pub enum Enum {
+                Variant
+            }
+}
+
+//- /main.rs crate:main deps:dep
+
+fn test(input: dep::test_mod_b::Enum) { }
+
+fn main() {
+    test(Enum$0);
+}
+"#,
+            expect![[r#"
+                ev dep::test_mod_b::Enum::Variant [type_could_unify]
+                en Enum (use dep::test_mod_b::Enum) [type_could_unify+requires_import]
+                fn main() []
+                fn test(…) []
+                md dep []
+                en Enum (use dep::test_mod_a::Enum) [requires_import]
+            "#]],
+        );
+    }
+
+    #[test]
+    fn set_fn_type_completion_info() {
+        check_relevance(
+            r#"
+//- /lib.rs crate:dep
+
+pub mod test_mod_b {
+    pub fn Function(j: isize) -> i32 {
+            }
+}
+
+            pub mod test_mod_a {
+    pub fn Function(i: usize) -> i32 {
+            }
+}
+
+//- /main.rs crate:main deps:dep
+
+fn test(input: fn(usize) -> i32) { }
+
+fn main() {
+    test(Function$0);
+}
+"#,
+            expect![[r#"
+                fn Function (use dep::test_mod_a::Function) [type_could_unify+requires_import]
+                fn main []
+                fn test []
+                md dep []
+                fn Function (use dep::test_mod_b::Function) [requires_import]
+            "#]],
+        );
+    }
+
+    // TODO This test does not trigger the const case
+    #[test]
+    fn set_const_type_completion_info() {
+        check_relevance(
+            r#"
+//- /lib.rs crate:dep
+
+pub mod test_mod_b {
+            pub const CONST: i32 = 1;
+}
+
+pub mod test_mod_a {
+            pub const CONST: i64 = 2;
+}
+
+//- /main.rs crate:main deps:dep
+
+fn test(input: i32) { }
+
+fn main() {
+    test(CONST$0);
+}
+"#,
+            expect![[r#"
+                ct CONST (use dep::test_mod_b::CONST) [type_could_unify+requires_import]
+                fn main() []
+                fn test(…) []
+                md dep []
+                ct CONST (use dep::test_mod_a::CONST) [requires_import]
+            "#]],
+        );
+    }
+
+    #[test]
+    fn set_static_type_completion_info() {
+        check_relevance(
+            r#"
+//- /lib.rs crate:dep
+
+pub mod test_mod_b {
+            pub static STATIC: i32 = 5;
+}
+
+pub mod test_mod_a {
+            pub static STATIC: i64 = 5;
+}
+
+//- /main.rs crate:main deps:dep
+
+fn test(input: i32) { }
+
+fn main() {
+    test(STATIC$0);
+}
+"#,
+            expect![[r#"
+                sc STATIC (use dep::test_mod_b::STATIC) [type_could_unify+requires_import]
+                fn main() []
+                fn test(…) []
+                md dep []
+                sc STATIC (use dep::test_mod_a::STATIC) [requires_import]
+            "#]],
+        );
+    }
+
+    // TODO: seems like something is going wrong here. Exact type match has no effect
+    // EDIT: maybe it is actually working
+    #[test]
+    fn set_self_type_completion_info() {
+        check_relevance(
+            r#"
+//- /main.rs crate:main
+
+struct Struct;
+
+impl Struct {
+fn test(&self) {
+        func(Self$0);            
+    }
+}
+
+fn func(input: Struct) { }
+
+"#,
+            expect![[r#"
+                st Struct [type]
+                st Self [type]
+                sp Self [type]
+                st Struct [type]
+                lc self [local]
+                fn func(…) []
+                me self.test() []
+            "#]],
+        );
+    }
+
+    // TODO: how do we actually test builtins?
+
+    #[test]
+    fn set_builtin_type_completion_info() {
+        check_relevance(
+            r#"
+//- /lib.rs crate:dep
+
+pub mod test_mod_b {
+            static STATIC: i32 = 5;
+}
+
+            pub mod test_mod_a {
+            static STATIC: &str = "test";
+}
+
+//- /main.rs crate:main deps:dep
+
+fn test(input: i32) { }
+
+fn main() {
+    test(STATIC$0);
+}
+"#,
+            expect![[r#"
+                fn main() []
+                fn test(…) []
+                md dep []
+            "#]],
+        );
+    }
+
     #[test]
     fn enum_detail_includes_record_fields() {
         check(
diff --git a/crates/ide-completion/src/render/function.rs b/crates/ide-completion/src/render/function.rs
index dfae715afe36d..c2e06c926f129 100644
--- a/crates/ide-completion/src/render/function.rs
+++ b/crates/ide-completion/src/render/function.rs
@@ -1,6 +1,6 @@
 //! Renderer for function calls.
 
-use hir::{db::HirDatabase, AsAssocItem, HirDisplay};
+use hir::{db::HirDatabase, AsAssocItem, Callable, HirDisplay, Type};
 use ide_db::{SnippetCap, SymbolKind};
 use itertools::Itertools;
 use stdx::{format_to, to_lower_snake_case};
@@ -8,8 +8,14 @@ use syntax::{AstNode, SmolStr};
 
 use crate::{
     context::{CompletionContext, DotAccess, DotAccessKind, PathCompletionCtx, PathKind},
-    item::{Builder, CompletionItem, CompletionItemKind, CompletionRelevance},
-    render::{compute_exact_name_match, compute_ref_match, compute_type_match, RenderContext},
+    item::{
+        Builder, CompletionItem, CompletionItemKind, CompletionRelevance,
+        CompletionRelevanceTypeMatch,
+    },
+    render::{
+        compute_exact_name_match, compute_ref_match, compute_type_match, compute_type_match2,
+        RenderContext,
+    },
     CallableSnippets,
 };
 
@@ -62,6 +68,7 @@ fn render(
         ),
         _ => (name.unescaped().to_smol_str(), name.to_smol_str()),
     };
+
     let mut item = CompletionItem::new(
         if func.self_param(db).is_some() {
             CompletionItemKind::Method
@@ -77,8 +84,48 @@ fn render(
         .as_assoc_item(ctx.db())
         .and_then(|trait_| trait_.containing_trait_or_trait_impl(ctx.db()))
         .map_or(false, |trait_| completion.is_ops_trait(trait_));
+
+    // TODO next step: figure out how to unify function types, we need to convert fndef to actual callable type
+
+    let type_match = if let Some(ref t) = completion.expected_type {
+        if let Some(t) = t.as_callable(db) {
+            let (mut param_types_exp, ret_type_exp) = (
+                t.params(db).into_iter().map(|(_, ty)| ty).collect::<Vec<_>>(),
+                t.return_type(),
+            );
+
+            param_types_exp.push(ret_type_exp);
+
+            let mut param_types = func
+                .ty(db)
+                .as_callable(db)
+                .unwrap()
+                .params(db)
+                .into_iter()
+                .map(|(_, ty)| ty)
+                .collect::<Vec<_>>();
+            param_types.push(ret_type.clone());
+
+            if param_types.len() != param_types_exp.len() {
+                None
+            } else {
+                if param_types_exp.iter().zip(param_types).all(|(expected_type, item_type)| {
+                    compute_type_match2(completion, &expected_type, &item_type).is_some()
+                }) {
+                    Some(CompletionRelevanceTypeMatch::CouldUnify)
+                } else {
+                    None
+                }
+            }
+        } else {
+            None
+        }
+    } else {
+        None
+    };
+
     item.set_relevance(CompletionRelevance {
-        type_match: compute_type_match(completion, &ret_type),
+        type_match,
         exact_name_match: compute_exact_name_match(completion, &call),
         is_op_method,
         ..ctx.completion_relevance()

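The follow-up patch below reworks this WIP matching into
compute_function_type_match: parameter types plus the return type are
compared pairwise, any failed pair rejects the candidate, and any
merely-unifiable pair downgrades the match from exact to could-unify. A
plain-Rust sketch of that rule (hypothetical names, string stand-ins for
hir::Type):

    #[derive(Clone, Copy, Debug, PartialEq)]
    enum TypeMatch { Exact, CouldUnify }

    // "_" stands in for a type that merely unifies with anything.
    fn match_fns(expected: &[&str], actual: &[&str]) -> Option<TypeMatch> {
        if expected.len() != actual.len() {
            return None;
        }
        let mut result = TypeMatch::Exact;
        for (e, a) in expected.iter().zip(actual) {
            if e == a {
                continue; // an exact pair keeps the current bucket
            } else if *e == "_" || *a == "_" {
                result = TypeMatch::CouldUnify;
            } else {
                return None; // one incompatible pair rejects the candidate
            }
        }
        Some(result)
    }

    fn main() {
        assert_eq!(match_fns(&["usize", "i32"], &["usize", "i32"]), Some(TypeMatch::Exact));
        assert_eq!(match_fns(&["usize", "_"], &["usize", "i32"]), Some(TypeMatch::CouldUnify));
        assert_eq!(match_fns(&["usize"], &["usize", "i32"]), None);
    }
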
From 6abba17a5bbdc27258a91233039a2e9b444d0d20 Mon Sep 17 00:00:00 2001
From: Jessie Chatham Spencer 
Date: Sat, 4 Nov 2023 12:31:42 +0000
Subject: [PATCH 70/80] Implement function type matching

---
 crates/hir/src/lib.rs                        |   4 +-
 crates/ide-completion/src/render.rs          | 141 ++++++++++++-------
 crates/ide-completion/src/render/function.rs |  51 +------
 3 files changed, 95 insertions(+), 101 deletions(-)

diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs
index 2ec382be6e331..5137bff055a4b 100644
--- a/crates/hir/src/lib.rs
+++ b/crates/hir/src/lib.rs
@@ -4567,8 +4567,8 @@ impl Type {
 // FIXME: Document this
 #[derive(Debug)]
 pub struct Callable {
-    pub ty: Type,
-    pub sig: CallableSig,
+    ty: Type,
+    sig: CallableSig,
     callee: Callee,
     /// Whether this is a method that was called with method call syntax.
     pub(crate) is_bound_method: bool,
diff --git a/crates/ide-completion/src/render.rs b/crates/ide-completion/src/render.rs
index 6f5d211188445..309c1937492c1 100644
--- a/crates/ide-completion/src/render.rs
+++ b/crates/ide-completion/src/render.rs
@@ -10,9 +10,7 @@ pub(crate) mod variant;
 pub(crate) mod union_literal;
 pub(crate) mod literal;
 
-use core::panic;
-
-use hir::{AsAssocItem, HasAttrs, HirDisplay, ModuleDef, ScopeDef, Type};
+use hir::{AsAssocItem, Function, HasAttrs, HirDisplay, ModuleDef, ScopeDef, Type};
 use ide_db::{
     documentation::{Documentation, HasDocs},
     helpers::item_name,
@@ -395,17 +393,14 @@ fn render_resolution_path(
         ScopeDef::ModuleDef(ModuleDef::Adt(adt)) | ScopeDef::AdtSelfType(adt) => {
             set_item_relevance(adt.ty(db))
         }
-        ScopeDef::ModuleDef(ModuleDef::Function(func)) => {
-            set_item_relevance(func.ty(db).as_callable(db).unwrap().ty)
-        }
-        ScopeDef::ModuleDef(ModuleDef::Variant(variant)) => {
-            set_item_relevance(variant.parent_enum(db).ty(db))
-        }
+        // Functions are handled at the start of the function.
+        ScopeDef::ModuleDef(ModuleDef::Function(_)) => (), // TODO: Should this be merged with the match case earlier in the function?
+        // Enum variants are handled at the start of the function.
+        ScopeDef::ModuleDef(ModuleDef::Variant(_)) => (),
         ScopeDef::ModuleDef(ModuleDef::Const(konst)) => set_item_relevance(konst.ty(db)),
         ScopeDef::ModuleDef(ModuleDef::Static(stat)) => set_item_relevance(stat.ty(db)),
         ScopeDef::ModuleDef(ModuleDef::BuiltinType(bt)) => set_item_relevance(bt.ty(db)),
         ScopeDef::ImplSelfType(imp) => set_item_relevance(imp.self_ty(db)),
-
         ScopeDef::GenericParam(_)
         | ScopeDef::Label(_)
         | ScopeDef::Unknown
@@ -502,6 +497,20 @@ fn scope_def_is_deprecated(ctx: &RenderContext<'_>, resolution: ScopeDef) -> boo
     }
 }
 
+fn match_types(
+    ctx: &CompletionContext<'_>,
+    ty1: &hir::Type,
+    ty2: &hir::Type,
+) -> Option<CompletionRelevanceTypeMatch> {
+    if ty1 == ty2 {
+        Some(CompletionRelevanceTypeMatch::Exact)
+    } else if ty1.could_unify_with(ctx.db, ty2) {
+        Some(CompletionRelevanceTypeMatch::CouldUnify)
+    } else {
+        None
+    }
+}
+
 fn compute_type_match(
     ctx: &CompletionContext<'_>,
     completion_ty: &hir::Type,
@@ -514,35 +523,42 @@ fn compute_type_match(
         return None;
     }
 
-    if completion_ty == expected_type {
-        Some(CompletionRelevanceTypeMatch::Exact)
-    } else if expected_type.could_unify_with(ctx.db, completion_ty) {
-        Some(CompletionRelevanceTypeMatch::CouldUnify)
-    } else {
-        None
-    }
+    match_types(ctx, expected_type, completion_ty)
 }
 
-fn compute_type_match2(
+fn compute_function_type_match(
     ctx: &CompletionContext<'_>,
-    completion_ty1: &hir::Type,
-    completion_ty2: &hir::Type,
+    func: &Function,
+) -> Option<CompletionRelevanceTypeMatch> {
-    let expected_type = completion_ty1;
+    // We pair the function parameter types plus the return type of the
+    // expected callable with those of the function we are matching, which
+    // lets us check all of the types in one iterator.
 
-    // We don't ever consider unit type to be an exact type match, since
-    // nearly always this is not meaningful to the user.
-    if expected_type.is_unit() {
+    let expected_callable = ctx.expected_type.as_ref()?.as_callable(ctx.db)?;
+    let expected_types = expected_callable.params(ctx.db).into_iter().map(|param| param.1);
+    let actual_types =
+        func.ty(ctx.db).as_callable(ctx.db)?.params(ctx.db).into_iter().map(|param| param.1);
+
+    if expected_types.len() != actual_types.len() {
         return None;
     }
 
-    if completion_ty2 == expected_type {
-        Some(CompletionRelevanceTypeMatch::Exact)
-    } else if expected_type.could_unify_with(ctx.db, completion_ty2) {
-        Some(CompletionRelevanceTypeMatch::CouldUnify)
-    } else {
-        None
+    let mut matches = expected_types
+        .zip(actual_types)
+        .chain([(expected_callable.return_type(), func.ret_type(ctx.db))])
+        .map(|(expected_type, actual_type)| match_types(ctx, &expected_type, &actual_type));
+
+    // Any missing type match indicates that these types can not be unified.
+    if matches.any(|type_match| type_match.is_none()) {
+        return None;
     }
+
+    // If any of the types are unifiable but not exact we consider the function types as a whole
+    // to be unifiable. Otherwise if every pair of types is an exact match the functions are an
+    // exact type match.
+    matches
+        .find(|type_match| matches!(type_match, Some(CompletionRelevanceTypeMatch::CouldUnify)))
+        .unwrap_or(Some(CompletionRelevanceTypeMatch::Exact))
 }
 
 fn compute_exact_name_match(ctx: &CompletionContext<'_>, completion_name: &str) -> bool {
@@ -796,7 +812,7 @@ fn main() {
         );
     }
 
-    // TODO: does this test even make sense?
+    // TODO: How do we test ModuleDef::Variant?
     #[test]
     fn set_enum_variant_type_completion_info() {
         check_relevance(
@@ -820,7 +836,7 @@ pub mod test_mod_a {
 fn test(input: dep::test_mod_b::Enum) { }
 
 fn main() {
-    test(Enum$0);
+    test(Enum::Variant$0);
 }
 "#,
             expect![[r#"
@@ -859,7 +875,7 @@ fn main() {
 }
 "#,
             expect![[r#"
-                fn Function (use dep::test_mod_a::Function) [type_could_unify+requires_import]
+                fn Function (use dep::test_mod_a::Function) [type+requires_import]
                 fn main []
                 fn test []
                 md dep []
@@ -868,7 +884,6 @@ fn main() {
         );
     }
 
-    // TODO This test does not trigger the const case
     #[test]
     fn set_const_type_completion_info() {
         check_relevance(
@@ -933,8 +948,38 @@ fn main() {
         );
     }
 
-    // TODO: seems like something is going wrong here. Exapt type match has no effect
-    // EDIT: maybe it is actually working
+    #[test]
+    fn set_self_type_completion_info_with_params() {
+        check_relevance(
+            r#"
+//- /lib.rs crate:dep
+pub struct Struct;
+
+impl Struct {
+    pub fn Function(&self, input: i32) -> bool {
+                false
+    }
+}
+
+
+//- /main.rs crate:main deps:dep
+
+use dep::Struct;
+
+
+fn test(input: fn(&dep::Struct, i32) -> bool) { }
+
+fn main() {
+    test(Struct::Function$0);
+}
+
+"#,
+            expect![[r#"
+                me Function [type]
+            "#]],
+        );
+    }
+
     #[test]
     fn set_self_type_completion_info() {
         check_relevance(
@@ -964,34 +1009,26 @@ fn func(input: Struct) { }
         );
     }
 
-    // TODO: how do we actually test builtins?
-
     #[test]
     fn set_builtin_type_completion_info() {
         check_relevance(
             r#"
-//- /lib.rs crate:dep
-
-pub mod test_mod_b {
-            static STATIC: i32 = 5;
-}
+//- /main.rs crate:main 
 
-            pub mod test_mod_a {
-            static STATIC: &str = "test";
-}
-
-//- /main.rs crate:main deps:dep
-
-fn test(input: i32) { }
+fn test(input: bool) { }
+    pub Input: bool = false; 
 
 fn main() {
-    test(STATIC$0);
+    let input = false; 
+    let inputbad = 3; 
+    test(inp$0);
 }
 "#,
             expect![[r#"
+                lc input [type+name+local]
+                lc inputbad [local]
                 fn main() []
                 fn test(…) []
-                md dep []
             "#]],
         );
     }
diff --git a/crates/ide-completion/src/render/function.rs b/crates/ide-completion/src/render/function.rs
index c2e06c926f129..ff84aa8742ea4 100644
--- a/crates/ide-completion/src/render/function.rs
+++ b/crates/ide-completion/src/render/function.rs
@@ -1,6 +1,6 @@
 //! Renderer for function calls.
 
-use hir::{db::HirDatabase, AsAssocItem, Callable, HirDisplay, Type};
+use hir::{db::HirDatabase, AsAssocItem, HirDisplay};
 use ide_db::{SnippetCap, SymbolKind};
 use itertools::Itertools;
 use stdx::{format_to, to_lower_snake_case};
@@ -8,13 +8,9 @@ use syntax::{AstNode, SmolStr};
 
 use crate::{
     context::{CompletionContext, DotAccess, DotAccessKind, PathCompletionCtx, PathKind},
-    item::{
-        Builder, CompletionItem, CompletionItemKind, CompletionRelevance,
-        CompletionRelevanceTypeMatch,
-    },
+    item::{Builder, CompletionItem, CompletionItemKind, CompletionRelevance},
     render::{
-        compute_exact_name_match, compute_ref_match, compute_type_match, compute_type_match2,
-        RenderContext,
+        compute_exact_name_match, compute_function_type_match, compute_ref_match, RenderContext,
     },
     CallableSnippets,
 };
@@ -85,47 +81,8 @@ fn render(
         .and_then(|trait_| trait_.containing_trait_or_trait_impl(ctx.db()))
         .map_or(false, |trait_| completion.is_ops_trait(trait_));
 
-    // TODO next step figure out how to unify function typesk, we need to convert fndef to actual callable type
-
-    let type_match = if let Some(ref t) = completion.expected_type {
-        if let Some(t) = t.as_callable(db) {
-            let (mut param_types_exp, ret_type_exp) = (
-                t.params(db).into_iter().map(|(_, ty)| ty).collect::<Vec<_>>(),
-                t.return_type(),
-            );
-
-            param_types_exp.push(ret_type_exp);
-
-            let mut param_types = func
-                .ty(db)
-                .as_callable(db)
-                .unwrap()
-                .params(db)
-                .into_iter()
-                .map(|(_, ty)| ty)
-                .collect::<Vec<_>>();
-            param_types.push(ret_type.clone());
-
-            if param_types.len() != param_types_exp.len() {
-                None
-            } else {
-                if param_types_exp.iter().zip(param_types).all(|(expected_type, item_type)| {
-                    compute_type_match2(completion, &expected_type, &item_type).is_some()
-                }) {
-                    Some(CompletionRelevanceTypeMatch::CouldUnify)
-                } else {
-                    None
-                }
-            }
-        } else {
-            None
-        }
-    } else {
-        None
-    };
-
     item.set_relevance(CompletionRelevance {
-        type_match,
+        type_match: compute_function_type_match(completion, &func),
         exact_name_match: compute_exact_name_match(completion, &call),
         is_op_method,
         ..ctx.completion_relevance()

From 1475848250288fcb0adbbede11c541b0a7458fb9 Mon Sep 17 00:00:00 2001
From: Lukas Wirth 
Date: Fri, 8 Dec 2023 13:19:34 +0100
Subject: [PATCH 71/80] Cleanup

---
 crates/ide-completion/src/completions/dot.rs  |  12 +-
 .../src/completions/item_list/trait_impl.rs   |  30 +-
 crates/ide-completion/src/completions/use_.rs |   4 +-
 crates/ide-completion/src/item.rs             |   9 +-
 crates/ide-completion/src/render.rs           | 142 ++---
 crates/ide-completion/src/render/function.rs  |  67 +--
 crates/ide-completion/src/tests/expression.rs |  98 ++--
 crates/ide-completion/src/tests/flyimport.rs  |  54 +-
 crates/ide-completion/src/tests/item.rs       |  24 +-
 crates/ide-completion/src/tests/pattern.rs    |   4 +-
 crates/ide-completion/src/tests/predicate.rs  |  42 +-
 crates/ide-completion/src/tests/record.rs     |   4 +-
 crates/ide-completion/src/tests/special.rs    |  70 +--
 crates/ide-completion/src/tests/type_pos.rs   | 486 +++++++++---------
 crates/ide-completion/src/tests/use_tree.rs   |  20 +-
 15 files changed, 516 insertions(+), 550 deletions(-)

diff --git a/crates/ide-completion/src/completions/dot.rs b/crates/ide-completion/src/completions/dot.rs
index 57e06461099e5..613a35dcb1088 100644
--- a/crates/ide-completion/src/completions/dot.rs
+++ b/crates/ide-completion/src/completions/dot.rs
@@ -940,9 +940,9 @@ impl Foo { fn foo(&self) { $0 } }"#,
             expect![[r#"
                 fd self.field i32
                 lc self       &Foo
-                sp Self
-                st Foo
-                bt u32
+                sp Self       Foo
+                st Foo        Foo
+                bt u32        u32
                 me self.foo() fn(&self)
             "#]],
         );
@@ -954,9 +954,9 @@ impl Foo { fn foo(&mut self) { $0 } }"#,
             expect![[r#"
                 fd self.0     i32
                 lc self       &mut Foo
-                sp Self
-                st Foo
-                bt u32
+                sp Self       Foo
+                st Foo        Foo
+                bt u32        u32
                 me self.foo() fn(&mut self)
             "#]],
         );
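
Most expectation updates in the rest of this patch are mechanical: completion
labels now carry the item's rendered type as a trailing detail column (see the
`detail: "E"` addition to the debug output further down), so `st Foo` becomes
`st Foo Foo` and `bt u32` becomes `bt u32 u32`. An illustrative reading of one
updated block, with an assumed column layout of kind, label, detail:

    // columns: kind, label, rendered type (the item's detail)
    expect![[r#"
        sp Self       Foo
        st Foo        Foo
        bt u32        u32
        me self.foo() fn(&self)
    "#]]
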
diff --git a/crates/ide-completion/src/completions/item_list/trait_impl.rs b/crates/ide-completion/src/completions/item_list/trait_impl.rs
index 42dfbfc7d9a14..b0e4d8a5acd10 100644
--- a/crates/ide-completion/src/completions/item_list/trait_impl.rs
+++ b/crates/ide-completion/src/completions/item_list/trait_impl.rs
@@ -417,10 +417,10 @@ impl Test for T {
 }
 ",
             expect![[r#"
-                sp Self
-                st T
+                sp Self T
+                st T    T
                 tt Test
-                bt u32
+                bt u32  u32
             "#]],
         );
 
@@ -526,10 +526,10 @@ impl Test for T {
 }
 ",
             expect![[r#"
-                sp Self
-                st T
+                sp Self T
+                st T    T
                 tt Test
-                bt u32
+                bt u32  u32
             "#]],
         );
 
@@ -543,10 +543,10 @@ impl Test for T {
 }
 ",
             expect![[r#"
-                sp Self
-                st T
+                sp Self T
+                st T    T
                 tt Test
-                bt u32
+                bt u32  u32
             "#]],
         );
 
@@ -562,10 +562,10 @@ impl Test for T {
 }
 ",
             expect![[r#"
-                sp Self
-                st T
+                sp Self T
+                st T    T
                 tt Test
-                bt u32
+                bt u32  u32
             "#]],
         );
 
@@ -610,10 +610,10 @@ impl Test for T {
 }
 ",
             expect![[r#"
-                sp Self
-                st T
+                sp Self T
+                st T    T
                 tt Test
-                bt u32
+                bt u32  u32
             "#]],
         );
 
diff --git a/crates/ide-completion/src/completions/use_.rs b/crates/ide-completion/src/completions/use_.rs
index 7a60030e9ef72..81107c1f419d2 100644
--- a/crates/ide-completion/src/completions/use_.rs
+++ b/crates/ide-completion/src/completions/use_.rs
@@ -71,9 +71,9 @@ pub(crate) fn complete_use_path(
 
                         if add_resolution {
                             let mut builder = Builder::from_resolution(ctx, path_ctx, name, def);
-                            builder.set_relevance(CompletionRelevance {
+                            builder.with_relevance(|r| CompletionRelevance {
                                 is_name_already_imported,
-                                ..Default::default()
+                                ..r
                             });
                             acc.add(builder.build(ctx.db));
                         }
diff --git a/crates/ide-completion/src/item.rs b/crates/ide-completion/src/item.rs
index 99b895eed4d23..b982322a73499 100644
--- a/crates/ide-completion/src/item.rs
+++ b/crates/ide-completion/src/item.rs
@@ -1,6 +1,6 @@
 //! See `CompletionItem` structure.
 
-use std::fmt;
+use std::{fmt, mem};
 
 use hir::Mutability;
 use ide_db::{
@@ -570,6 +570,13 @@ impl Builder {
         self.relevance = relevance;
         self
     }
+    pub(crate) fn with_relevance(
+        &mut self,
+        relevance: impl FnOnce(CompletionRelevance) -> CompletionRelevance,
+    ) -> &mut Builder {
+        self.relevance = relevance(mem::take(&mut self.relevance));
+        self
+    }
     pub(crate) fn trigger_call_info(&mut self) -> &mut Builder {
         self.trigger_call_info = true;
         self
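
The with_relevance builder method added here threads the relevance already
accumulated on the item through a closure instead of overwriting it, which is
what the use_ completion hunk above switches to. A sketch of the difference,
reusing the field from that call site:

    // Replaces the whole relevance record, dropping anything set earlier:
    builder.set_relevance(CompletionRelevance {
        is_name_already_imported,
        ..Default::default()
    });

    // Overrides only the named field, keeping the rest of the current relevance:
    builder.with_relevance(|r| CompletionRelevance { is_name_already_imported, ..r });
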
diff --git a/crates/ide-completion/src/render.rs b/crates/ide-completion/src/render.rs
index 309c1937492c1..2ea3f74d18bce 100644
--- a/crates/ide-completion/src/render.rs
+++ b/crates/ide-completion/src/render.rs
@@ -10,7 +10,7 @@ pub(crate) mod variant;
 pub(crate) mod union_literal;
 pub(crate) mod literal;
 
-use hir::{AsAssocItem, Function, HasAttrs, HirDisplay, ModuleDef, ScopeDef, Type};
+use hir::{AsAssocItem, HasAttrs, HirDisplay, ModuleDef, ScopeDef, Type};
 use ide_db::{
     documentation::{Documentation, HasDocs},
     helpers::item_name,
@@ -393,10 +393,10 @@ fn render_resolution_path(
         ScopeDef::ModuleDef(ModuleDef::Adt(adt)) | ScopeDef::AdtSelfType(adt) => {
             set_item_relevance(adt.ty(db))
         }
-        // Functions are handled at the start of the function.
-        ScopeDef::ModuleDef(ModuleDef::Function(_)) => (), // TODO: Should this be merged with the match case earlier in the function?
-        // Enum variants are handled at the start of the function.
-        ScopeDef::ModuleDef(ModuleDef::Variant(_)) => (),
+        // Filtered out above
+        ScopeDef::ModuleDef(
+            ModuleDef::Function(_) | ModuleDef::Variant(_) | ModuleDef::Macro(_),
+        ) => (),
         ScopeDef::ModuleDef(ModuleDef::Const(konst)) => set_item_relevance(konst.ty(db)),
         ScopeDef::ModuleDef(ModuleDef::Static(stat)) => set_item_relevance(stat.ty(db)),
         ScopeDef::ModuleDef(ModuleDef::BuiltinType(bt)) => set_item_relevance(bt.ty(db)),
@@ -404,11 +404,12 @@ fn render_resolution_path(
         ScopeDef::GenericParam(_)
         | ScopeDef::Label(_)
         | ScopeDef::Unknown
-        | ScopeDef::ModuleDef(ModuleDef::Trait(_))
-        | ScopeDef::ModuleDef(ModuleDef::TraitAlias(_))
-        | ScopeDef::ModuleDef(ModuleDef::Macro(_))
-        | ScopeDef::ModuleDef(ModuleDef::Module(_))
-        | ScopeDef::ModuleDef(ModuleDef::TypeAlias(_)) => (),
+        | ScopeDef::ModuleDef(
+            ModuleDef::Trait(_)
+            | ModuleDef::TraitAlias(_)
+            | ModuleDef::Module(_)
+            | ModuleDef::TypeAlias(_),
+        ) => (),
     };
 
     item
@@ -497,6 +498,7 @@ fn scope_def_is_deprecated(ctx: &RenderContext<'_>, resolution: ScopeDef) -> boo
     }
 }
 
+// FIXME: This checks types without considering possible coercions, which some completions might want to do
 fn match_types(
     ctx: &CompletionContext<'_>,
     ty1: &hir::Type,
@@ -526,41 +528,6 @@ fn compute_type_match(
     match_types(ctx, expected_type, completion_ty)
 }
 
-fn compute_function_type_match(
-    ctx: &CompletionContext<'_>,
-    func: &Function,
-) -> Option<CompletionRelevanceTypeMatch> {
-    // We pair the function parameter types plus the return type of the
-    // expected callable with those of the function we are matching, which
-    // lets us check all of the types in one iterator.
-
-    let expected_callable = ctx.expected_type.as_ref()?.as_callable(ctx.db)?;
-    let expected_types = expected_callable.params(ctx.db).into_iter().map(|param| param.1);
-    let actual_types =
-        func.ty(ctx.db).as_callable(ctx.db)?.params(ctx.db).into_iter().map(|param| param.1);
-
-    if expected_types.len() != actual_types.len() {
-        return None;
-    }
-
-    let mut matches = expected_types
-        .zip(actual_types)
-        .chain([(expected_callable.return_type(), func.ret_type(ctx.db))])
-        .map(|(expected_type, actual_type)| match_types(ctx, &expected_type, &actual_type));
-
-    // Any missing type match indicates that these types can not be unified.
-    if matches.any(|type_match| type_match.is_none()) {
-        return None;
-    }
-
-    // If any of the types are unifiable but not exact we consider the function types as a whole
-    // to be unifiable. Otherwise if every pair of types is an exact match the functions are an
-    // exact type match.
-    matches
-        .find(|type_match| matches!(type_match, Some(CompletionRelevanceTypeMatch::CouldUnify)))
-        .unwrap_or(Some(CompletionRelevanceTypeMatch::Exact))
-}
-
 fn compute_exact_name_match(ctx: &CompletionContext<'_>, completion_name: &str) -> bool {
     ctx.expected_name.as_ref().map_or(false, |name| name.text() == completion_name)
 }
@@ -745,16 +712,16 @@ fn main() {
 
 pub mod test_mod_b {
     pub union Union {
-            a: i32,
-            b: i32
-            }
+        a: i32,
+        b: i32
+    }
 }
 
 pub mod test_mod_a {
     pub enum Union {
-                a: i32,
-                b: i32
-            }
+        a: i32,
+        b: i32
+    }
 }
 
 //- /main.rs crate:main deps:dep
@@ -783,14 +750,14 @@ fn main() {
 
 pub mod test_mod_b {
     pub enum Enum {
-                variant
-            }
+        variant
+    }
 }
 
 pub mod test_mod_a {
     pub enum Enum {
-                variant
-            }
+        variant
+    }
 }
 
 //- /main.rs crate:main deps:dep
@@ -812,7 +779,6 @@ fn main() {
         );
     }
 
-    // TODO: How do we test ModuleDef::Variant?
     #[test]
     fn set_enum_variant_type_completion_info() {
         check_relevance(
@@ -821,14 +787,14 @@ fn main() {
 
 pub mod test_mod_b {
     pub enum Enum {
-                Variant
-            }
+        Variant
+    }
 }
 
 pub mod test_mod_a {
     pub enum Enum {
-                Variant
-            }
+        Variant
+    }
 }
 
 //- /main.rs crate:main deps:dep
@@ -836,16 +802,14 @@ pub mod test_mod_a {
 fn test(input: dep::test_mod_b::Enum) { }
 
 fn main() {
-    test(Enum::Variant$0);
+    test(Variant$0);
 }
 "#,
             expect![[r#"
                 ev dep::test_mod_b::Enum::Variant [type_could_unify]
-                en Enum (use dep::test_mod_b::Enum) [type_could_unify+requires_import]
                 fn main() []
                 fn test(…) []
                 md dep []
-                en Enum (use dep::test_mod_a::Enum) [requires_import]
             "#]],
         );
     }
@@ -857,13 +821,11 @@ fn main() {
 //- /lib.rs crate:dep
 
 pub mod test_mod_b {
-    pub fn Function(j: isize) -> i32 {
-            }
+    pub fn function(j: isize) -> i32 {}
 }
 
-            pub mod test_mod_a {
-    pub fn Function(i: usize) -> i32 {
-            }
+pub mod test_mod_a {
+    pub fn function(i: usize) -> i32 {}
 }
 
 //- /main.rs crate:main deps:dep
@@ -871,15 +833,15 @@ pub mod test_mod_b {
 fn test(input: fn(usize) -> i32) { }
 
 fn main() {
-    test(Function$0);
+    test(function$0);
 }
 "#,
             expect![[r#"
-                fn Function (use dep::test_mod_a::Function) [type+requires_import]
                 fn main []
                 fn test []
                 md dep []
-                fn Function (use dep::test_mod_b::Function) [requires_import]
+                fn function (use dep::test_mod_a::function) [requires_import]
+                fn function (use dep::test_mod_b::function) [requires_import]
             "#]],
         );
     }
@@ -891,11 +853,11 @@ fn main() {
 //- /lib.rs crate:dep
 
 pub mod test_mod_b {
-            pub const CONST: i32 = 1;
+    pub const CONST: i32 = 1;
 }
 
 pub mod test_mod_a {
-            pub const CONST: i64 = 2;
+    pub const CONST: i64 = 2;
 }
 
 //- /main.rs crate:main deps:dep
@@ -923,11 +885,11 @@ fn main() {
 //- /lib.rs crate:dep
 
 pub mod test_mod_b {
-            pub static STATIC: i32 = 5;
+    pub static STATIC: i32 = 5;
 }
 
 pub mod test_mod_a {
-            pub static STATIC: i64 = 5;
+    pub static STATIC: i64 = 5;
 }
 
 //- /main.rs crate:main deps:dep
@@ -975,7 +937,7 @@ fn main() {
 
 "#,
             expect![[r#"
-                me Function [type]
+                me Function []
             "#]],
         );
     }
@@ -990,7 +952,7 @@ struct Struct;
 
 impl Struct {
 fn test(&self) {
-        func(Self$0);            
+        func(Self$0);
     }
 }
 
@@ -1013,14 +975,14 @@ fn func(input: Struct) { }
     fn set_builtin_type_completion_info() {
         check_relevance(
             r#"
-//- /main.rs crate:main 
+//- /main.rs crate:main
 
 fn test(input: bool) { }
-    pub Input: bool = false; 
+    pub Input: bool = false;
 
 fn main() {
-    let input = false; 
-    let inputbad = 3; 
+    let input = false;
+    let inputbad = 3;
     test(inp$0);
 }
 "#,
@@ -1424,6 +1386,7 @@ use self::E::*;
                         kind: SymbolKind(
                             Enum,
                         ),
+                        detail: "E",
                         documentation: Documentation(
                             "enum docs",
                         ),
@@ -1668,6 +1631,7 @@ fn go(world: &WorldSnapshot) { go(w$0) }
                 st WorldSnapshot {…} []
                 st &WorldSnapshot {…} [type]
                 st WorldSnapshot []
+                st &WorldSnapshot [type]
                 fn go(…) []
             "#]],
         );
@@ -1767,6 +1731,7 @@ fn main() {
                 st S []
                 st &mut S [type]
                 st S []
+                st &mut S [type]
                 fn foo(…) []
                 fn main() []
             "#]],
@@ -1783,7 +1748,7 @@ fn main() {
             expect![[r#"
                 lc s [type+name+local]
                 st S [type]
-                st S []
+                st S [type]
                 fn foo(…) []
                 fn main() []
             "#]],
@@ -1800,7 +1765,7 @@ fn main() {
             expect![[r#"
                 lc ssss [type+local]
                 st S [type]
-                st S []
+                st S [type]
                 fn foo(…) []
                 fn main() []
             "#]],
@@ -1839,7 +1804,9 @@ fn main() {
                 st S []
                 st &S [type]
                 st S []
+                st &S [type]
                 st T []
+                st &T [type]
                 fn foo(…) []
                 fn main() []
                 md core []
@@ -1885,7 +1852,9 @@ fn main() {
                 st S []
                 st &mut S [type]
                 st S []
+                st &mut S [type]
                 st T []
+                st &mut T [type]
                 fn foo(…) []
                 fn main() []
                 md core []
@@ -1924,7 +1893,7 @@ fn bar(t: Foo) {}
             expect![[r#"
                 ev Foo::A [type]
                 ev Foo::B [type]
-                en Foo []
+                en Foo [type]
                 fn bar(…) []
                 fn foo() []
             "#]],
@@ -1947,6 +1916,7 @@ fn bar(t: &Foo) {}
                 ev Foo::B []
                 ev &Foo::B [type]
                 en Foo []
+                en &Foo [type]
                 fn bar(…) []
                 fn foo() []
             "#]],
@@ -1980,7 +1950,9 @@ fn main() {
                 st S []
                 st &S [type]
                 st S []
+                st &S [type]
                 st T []
+                st &T [type]
                 fn bar() []
                 fn &bar() [type]
                 fn foo(…) []
@@ -2189,8 +2161,8 @@ fn foo() {
                 lc foo [type+local]
                 ev Foo::A(…) [type_could_unify]
                 ev Foo::B [type_could_unify]
+                en Foo [type_could_unify]
                 fn foo() []
-                en Foo []
                 fn bar() []
                 fn baz() []
             "#]],
diff --git a/crates/ide-completion/src/render/function.rs b/crates/ide-completion/src/render/function.rs
index ff84aa8742ea4..d23ed71fdcc65 100644
--- a/crates/ide-completion/src/render/function.rs
+++ b/crates/ide-completion/src/render/function.rs
@@ -9,9 +9,7 @@ use syntax::{AstNode, SmolStr};
 use crate::{
     context::{CompletionContext, DotAccess, DotAccessKind, PathCompletionCtx, PathKind},
     item::{Builder, CompletionItem, CompletionItemKind, CompletionRelevance},
-    render::{
-        compute_exact_name_match, compute_function_type_match, compute_ref_match, RenderContext,
-    },
+    render::{compute_exact_name_match, compute_ref_match, compute_type_match, RenderContext},
     CallableSnippets,
 };
 
@@ -81,8 +79,30 @@ fn render(
         .and_then(|trait_| trait_.containing_trait_or_trait_impl(ctx.db()))
         .map_or(false, |trait_| completion.is_ops_trait(trait_));
 
+    let (has_dot_receiver, has_call_parens, cap) = match func_kind {
+        FuncKind::Function(&PathCompletionCtx {
+            kind: PathKind::Expr { .. },
+            has_call_parens,
+            ..
+        }) => (false, has_call_parens, ctx.completion.config.snippet_cap),
+        FuncKind::Method(&DotAccess { kind: DotAccessKind::Method { has_parens }, .. }, _) => {
+            (true, has_parens, ctx.completion.config.snippet_cap)
+        }
+        FuncKind::Method(DotAccess { kind: DotAccessKind::Field { .. }, .. }, _) => {
+            (true, false, ctx.completion.config.snippet_cap)
+        }
+        _ => (false, false, None),
+    };
+    let complete_call_parens = cap
+        .filter(|_| !has_call_parens)
+        .and_then(|cap| Some((cap, params(ctx.completion, func, &func_kind, has_dot_receiver)?)));
+
     item.set_relevance(CompletionRelevance {
-        type_match: compute_function_type_match(completion, &func),
+        type_match: if has_call_parens || complete_call_parens.is_some() {
+            compute_type_match(completion, &ret_type)
+        } else {
+            compute_type_match(completion, &func.ty(db))
+        },
         exact_name_match: compute_exact_name_match(completion, &call),
         is_op_method,
         ..ctx.completion_relevance()
@@ -112,42 +132,9 @@ fn render(
         .detail(detail)
         .lookup_by(name.unescaped().to_smol_str());
 
-    match ctx.completion.config.snippet_cap {
-        Some(cap) => {
-            let complete_params = match func_kind {
-                FuncKind::Function(PathCompletionCtx {
-                    kind: PathKind::Expr { .. },
-                    has_call_parens: false,
-                    ..
-                }) => Some(false),
-                FuncKind::Method(
-                    DotAccess {
-                        kind:
-                            DotAccessKind::Method { has_parens: false } | DotAccessKind::Field { .. },
-                        ..
-                    },
-                    _,
-                ) => Some(true),
-                _ => None,
-            };
-            if let Some(has_dot_receiver) = complete_params {
-                if let Some((self_param, params)) =
-                    params(ctx.completion, func, &func_kind, has_dot_receiver)
-                {
-                    add_call_parens(
-                        &mut item,
-                        completion,
-                        cap,
-                        call,
-                        escaped_call,
-                        self_param,
-                        params,
-                    );
-                }
-            }
-        }
-        _ => (),
-    };
+    if let Some((cap, (self_param, params))) = complete_call_parens {
+        add_call_parens(&mut item, completion, cap, call, escaped_call, self_param, params);
+    }
 
     match ctx.import_to_add {
         Some(import_to_add) => {
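
The render rewrite above picks type_match based on whether the completion will
insert (or already has) call parentheses: with parens the item acts as a call,
so it is matched against the return type; without them it stands for the
function item itself. A hedged illustration, not taken from the fixtures:

    fn foo(_x: usize) -> i32 { 0 }

    fn main() {
        // With call parens the completion's type is the return type:
        let n: i32 = foo(0);
        // Without parens it is the function item, usable as fn(usize) -> i32:
        let f: fn(usize) -> i32 = foo;
        let _ = (n, f);
    }
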
diff --git a/crates/ide-completion/src/tests/expression.rs b/crates/ide-completion/src/tests/expression.rs
index be5b7f8a3404e..e6969c8db300c 100644
--- a/crates/ide-completion/src/tests/expression.rs
+++ b/crates/ide-completion/src/tests/expression.rs
@@ -26,22 +26,22 @@ fn baz() {
             "#,
         // This should not contain `FooDesc {…}`.
         expect![[r#"
-            ct CONST
-            en Enum
+            ct CONST         Unit
+            en Enum          Enum
             fn baz()         fn()
             fn create_foo(…) fn(&FooDesc)
             fn function()    fn()
             ma makro!(…)     macro_rules! makro
             md _69latrick
             md module
-            sc STATIC
-            st FooDesc
-            st Record
-            st Tuple
-            st Unit
-            un Union
+            sc STATIC        Unit
+            st FooDesc       FooDesc
+            st Record        Record
+            st Tuple         Tuple
+            st Unit          Unit
+            un Union         Union
             ev TupleV(…)     TupleV(u32)
-            bt u32
+            bt u32           u32
             kw crate::
             kw false
             kw for
@@ -83,7 +83,7 @@ fn func(param0 @ (param1, param2): (i32, i32)) {
             lc param0     (i32, i32)
             lc param1     i32
             lc param2     i32
-            bt u32
+            bt u32        u32
             kw crate::
             kw false
             kw for
@@ -117,24 +117,24 @@ impl Unit {
 "#,
         // `self` is in here twice, once as the module, once as the local
         expect![[r#"
-            ct CONST
+            ct CONST        Unit
             cp CONST_PARAM
-            en Enum
+            en Enum         Enum
             fn function()   fn()
             fn local_func() fn()
             lc self         Unit
             ma makro!(…)    macro_rules! makro
             md module
             md qualified
-            sp Self
-            sc STATIC
-            st Record
-            st Tuple
-            st Unit
+            sp Self         Unit
+            sc STATIC       Unit
+            st Record       Record
+            st Tuple        Tuple
+            st Unit         Unit
             tp TypeParam
-            un Union
+            un Union        Union
             ev TupleV(…)    TupleV(u32)
-            bt u32
+            bt u32          u32
             kw const
             kw crate::
             kw enum
@@ -181,18 +181,18 @@ impl Unit {
 }
 "#,
         expect![[r#"
-            ct CONST
-            en Enum
+            ct CONST      Unit
+            en Enum       Enum
             fn function() fn()
             ma makro!(…)  macro_rules! makro
             md module
             md qualified
-            sc STATIC
-            st Record
-            st Tuple
-            st Unit
+            sc STATIC     Unit
+            st Record     Record
+            st Tuple      Tuple
+            st Unit       Unit
             tt Trait
-            un Union
+            un Union      Union
             ev TupleV(…)  TupleV(u32)
             ?? Unresolved
         "#]],
@@ -211,7 +211,7 @@ fn complete_in_block() {
 "#,
         expect![[r#"
             fn foo()       fn()
-            bt u32
+            bt u32         u32
             kw const
             kw crate::
             kw enum
@@ -256,7 +256,7 @@ fn complete_after_if_expr() {
 "#,
         expect![[r#"
             fn foo()       fn()
-            bt u32
+            bt u32         u32
             kw const
             kw crate::
             kw else
@@ -304,7 +304,7 @@ fn complete_in_match_arm() {
 "#,
         expect![[r#"
             fn foo()     fn()
-            bt u32
+            bt u32       u32
             kw crate::
             kw false
             kw for
@@ -328,7 +328,7 @@ fn completes_in_loop_ctx() {
         r"fn my() { loop { $0 } }",
         expect![[r#"
             fn my()        fn()
-            bt u32
+            bt u32         u32
             kw break
             kw const
             kw continue
@@ -370,7 +370,7 @@ fn completes_in_let_initializer() {
         r#"fn main() { let _ = $0 }"#,
         expect![[r#"
             fn main()    fn()
-            bt u32
+            bt u32       u32
             kw crate::
             kw false
             kw for
@@ -403,8 +403,8 @@ fn foo() {
 "#,
         expect![[r#"
             fn foo()     fn()
-            st Foo
-            bt u32
+            st Foo       Foo
+            bt u32       u32
             kw crate::
             kw false
             kw for
@@ -439,7 +439,7 @@ fn foo() {
         expect![[r#"
             fn foo()     fn()
             lc bar       i32
-            bt u32
+            bt u32       u32
             kw crate::
             kw false
             kw for
@@ -470,7 +470,7 @@ fn quux(x: i32) {
             fn quux(…)   fn(i32)
             lc x         i32
             ma m!(…)     macro_rules! m
-            bt u32
+            bt u32       u32
             kw crate::
             kw false
             kw for
@@ -497,7 +497,7 @@ fn quux(x: i32) {
             fn quux(…)   fn(i32)
             lc x         i32
             ma m!(…)     macro_rules! m
-            bt u32
+            bt u32       u32
             kw crate::
             kw false
             kw for
@@ -683,11 +683,11 @@ fn brr() {
 }
 "#,
         expect![[r#"
-            en HH
+            en HH              HH
             fn brr()           fn()
-            st YoloVariant
+            st YoloVariant     YoloVariant
             st YoloVariant {…} YoloVariant { f: usize }
-            bt u32
+            bt u32             u32
             kw crate::
             kw false
             kw for
@@ -749,7 +749,7 @@ fn foo() { if foo {} $0 }
 "#,
         expect![[r#"
             fn foo()       fn()
-            bt u32
+            bt u32         u32
             kw const
             kw crate::
             kw else
@@ -789,7 +789,7 @@ fn foo() { if foo {} el$0 }
 "#,
         expect![[r#"
             fn foo()       fn()
-            bt u32
+            bt u32         u32
             kw const
             kw crate::
             kw else
@@ -829,7 +829,7 @@ fn foo() { bar(if foo {} $0) }
 "#,
         expect![[r#"
             fn foo()     fn()
-            bt u32
+            bt u32       u32
             kw crate::
             kw else
             kw else if
@@ -853,7 +853,7 @@ fn foo() { bar(if foo {} el$0) }
 "#,
         expect![[r#"
             fn foo()     fn()
-            bt u32
+            bt u32       u32
             kw crate::
             kw else
             kw else if
@@ -877,7 +877,7 @@ fn foo() { if foo {} $0 let x = 92; }
 "#,
         expect![[r#"
             fn foo()       fn()
-            bt u32
+            bt u32         u32
             kw const
             kw crate::
             kw else
@@ -917,7 +917,7 @@ fn foo() { if foo {} el$0 let x = 92; }
 "#,
         expect![[r#"
             fn foo()       fn()
-            bt u32
+            bt u32         u32
             kw const
             kw crate::
             kw else
@@ -957,7 +957,7 @@ fn foo() { if foo {} el$0 { let x = 92; } }
 "#,
         expect![[r#"
             fn foo()       fn()
-            bt u32
+            bt u32         u32
             kw const
             kw crate::
             kw else
@@ -1009,7 +1009,7 @@ pub struct UnstableThisShouldNotBeListed;
         expect![[r#"
             fn main()      fn()
             md std
-            bt u32
+            bt u32         u32
             kw const
             kw crate::
             kw enum
@@ -1060,8 +1060,8 @@ pub struct UnstableButWeAreOnNightlyAnyway;
         expect![[r#"
             fn main()                 fn()
             md std
-            st UnstableButWeAreOnNightlyAnyway
-            bt u32
+            st UnstableButWeAreOnNightlyAnyway UnstableButWeAreOnNightlyAnyway
+            bt u32                    u32
             kw const
             kw crate::
             kw enum
diff --git a/crates/ide-completion/src/tests/flyimport.rs b/crates/ide-completion/src/tests/flyimport.rs
index 21f693d79f1db..9a4a94a24566c 100644
--- a/crates/ide-completion/src/tests/flyimport.rs
+++ b/crates/ide-completion/src/tests/flyimport.rs
@@ -139,10 +139,10 @@ fn main() {
 }
 "#,
         expect![[r#"
-            st Rc (use dep::Rc)
-            st Rcar (use dep::Rcar)
-            st Rc (use dep::some_module::Rc)
-            st Rcar (use dep::some_module::Rcar)
+            st Rc (use dep::Rc)       Rc
+            st Rcar (use dep::Rcar)   Rcar
+            st Rc (use dep::some_module::Rc) Rc
+            st Rcar (use dep::some_module::Rcar) Rcar
         "#]],
     );
     check(
@@ -165,12 +165,12 @@ fn main() {
 }
 "#,
         expect![[r#"
-            ct RC (use dep::RC)
-            st Rc (use dep::Rc)
-            st Rcar (use dep::Rcar)
-            ct RC (use dep::some_module::RC)
-            st Rc (use dep::some_module::Rc)
-            st Rcar (use dep::some_module::Rcar)
+            ct RC (use dep::RC)       ()
+            st Rc (use dep::Rc)       Rc
+            st Rcar (use dep::Rcar)   Rcar
+            ct RC (use dep::some_module::RC) ()
+            st Rc (use dep::some_module::Rc) Rc
+            st Rcar (use dep::some_module::Rcar) Rcar
         "#]],
     );
     check(
@@ -193,8 +193,8 @@ fn main() {
 }
 "#,
         expect![[r#"
-            ct RC (use dep::RC)
-            ct RC (use dep::some_module::RC)
+            ct RC (use dep::RC)       ()
+            ct RC (use dep::some_module::RC) ()
         "#]],
     );
 }
@@ -227,10 +227,10 @@ fn main() {
 }
 "#,
         expect![[r#"
-                st ThirdStruct (use dep::some_module::ThirdStruct)
-                st AfterThirdStruct (use dep::some_module::AfterThirdStruct)
-                st ThiiiiiirdStruct (use dep::some_module::ThiiiiiirdStruct)
-            "#]],
+            st ThirdStruct (use dep::some_module::ThirdStruct) ThirdStruct
+            st AfterThirdStruct (use dep::some_module::AfterThirdStruct) AfterThirdStruct
+            st ThiiiiiirdStruct (use dep::some_module::ThiiiiiirdStruct) ThiiiiiirdStruct
+        "#]],
     );
 }
 
@@ -309,7 +309,7 @@ fn trait_const_fuzzy_completion() {
     check(
         fixture,
         expect![[r#"
-            ct SPECIAL_CONST (use dep::test_mod::TestTrait)
+            ct SPECIAL_CONST (use dep::test_mod::TestTrait) u8
         "#]],
     );
 
@@ -597,7 +597,7 @@ fn main() {
 }
 "#,
         expect![[r#"
-            ct SPECIAL_CONST (use dep::test_mod::TestTrait) DEPRECATED
+            ct SPECIAL_CONST (use dep::test_mod::TestTrait) u8 DEPRECATED
             fn weird_function() (use dep::test_mod::TestTrait) fn() DEPRECATED
         "#]],
     );
@@ -717,7 +717,7 @@ fn main() {
     check(
         fixture,
         expect![[r#"
-        st Item (use foo::bar::baz::Item)
+            st Item (use foo::bar::baz::Item) Item
         "#]],
     );
 
@@ -759,7 +759,7 @@ fn main() {
     check(
         fixture,
         expect![[r#"
-        ct TEST_ASSOC (use foo::Item)
+            ct TEST_ASSOC (use foo::Item) usize
         "#]],
     );
 
@@ -803,8 +803,8 @@ fn main() {
     check(
         fixture,
         expect![[r#"
-        ct TEST_ASSOC (use foo::bar::Item)
-    "#]],
+            ct TEST_ASSOC (use foo::bar::Item) usize
+        "#]],
     );
 
     check_edit(
@@ -897,7 +897,7 @@ fn main() {
     TES$0
 }"#,
         expect![[r#"
-            ct TEST_CONST (use foo::TEST_CONST)
+            ct TEST_CONST (use foo::TEST_CONST) usize
         "#]],
     );
 
@@ -914,7 +914,7 @@ fn main() {
     tes$0
 }"#,
         expect![[r#"
-            ct TEST_CONST (use foo::TEST_CONST)
+            ct TEST_CONST (use foo::TEST_CONST) usize
             fn test_function() (use foo::test_function) fn() -> i32
         "#]],
     );
@@ -1138,8 +1138,8 @@ mod mud {
 }
 "#,
         expect![[r#"
-                st Struct (use crate::Struct)
-            "#]],
+            st Struct (use crate::Struct) Struct
+        "#]],
     );
 }
 
@@ -1250,7 +1250,7 @@ enum Foo {
 }
 }"#,
         expect![[r#"
-            st Barbara (use foo::Barbara)
+            st Barbara (use foo::Barbara) Barbara
         "#]],
     )
 }
diff --git a/crates/ide-completion/src/tests/item.rs b/crates/ide-completion/src/tests/item.rs
index 3ef2a7c942bce..de3fd05189f37 100644
--- a/crates/ide-completion/src/tests/item.rs
+++ b/crates/ide-completion/src/tests/item.rs
@@ -18,15 +18,15 @@ fn target_type_or_trait_in_impl_block() {
 impl Tra$0
 "#,
         expect![[r#"
-            en Enum
+            en Enum      Enum
             ma makro!(…) macro_rules! makro
             md module
-            st Record
-            st Tuple
-            st Unit
+            st Record    Record
+            st Tuple     Tuple
+            st Unit      Unit
             tt Trait
-            un Union
-            bt u32
+            un Union     Union
+            bt u32       u32
             kw crate::
             kw self::
         "#]],
@@ -40,15 +40,15 @@ fn target_type_in_trait_impl_block() {
 impl Trait for Str$0
 "#,
         expect![[r#"
-            en Enum
+            en Enum      Enum
             ma makro!(…) macro_rules! makro
             md module
-            st Record
-            st Tuple
-            st Unit
+            st Record    Record
+            st Tuple     Tuple
+            st Unit      Unit
             tt Trait
-            un Union
-            bt u32
+            un Union     Union
+            bt u32       u32
             kw crate::
             kw self::
         "#]],
diff --git a/crates/ide-completion/src/tests/pattern.rs b/crates/ide-completion/src/tests/pattern.rs
index b2e8274a84d7d..67cf551fce84c 100644
--- a/crates/ide-completion/src/tests/pattern.rs
+++ b/crates/ide-completion/src/tests/pattern.rs
@@ -435,7 +435,7 @@ fn foo() {
 }
 "#,
         expect![[r#"
-            st Bar
+            st Bar     Bar
             kw crate::
             kw self::
         "#]],
@@ -450,7 +450,7 @@ fn foo() {
 }
 "#,
         expect![[r#"
-            st Foo
+            st Foo     Foo
             kw crate::
             kw self::
         "#]],
diff --git a/crates/ide-completion/src/tests/predicate.rs b/crates/ide-completion/src/tests/predicate.rs
index 789ad66345b12..46a3e97d3e92d 100644
--- a/crates/ide-completion/src/tests/predicate.rs
+++ b/crates/ide-completion/src/tests/predicate.rs
@@ -16,16 +16,16 @@ fn predicate_start() {
 struct Foo<'lt, T, const C: usize> where $0 {}
 "#,
         expect![[r#"
-            en Enum
+            en Enum      Enum
             ma makro!(…) macro_rules! makro
             md module
-            st Foo<…>
-            st Record
-            st Tuple
-            st Unit
+            st Foo<…>    Foo<'_, {unknown}, _>
+            st Record    Record
+            st Tuple     Tuple
+            st Unit      Unit
             tt Trait
-            un Union
-            bt u32
+            un Union     Union
+            bt u32       u32
             kw crate::
             kw self::
         "#]],
@@ -89,16 +89,16 @@ fn param_list_for_for_pred() {
 struct Foo<'lt, T, const C: usize> where for<'a> $0 {}
 "#,
         expect![[r#"
-            en Enum
+            en Enum      Enum
             ma makro!(…) macro_rules! makro
             md module
-            st Foo<…>
-            st Record
-            st Tuple
-            st Unit
+            st Foo<…>    Foo<'_, {unknown}, _>
+            st Record    Record
+            st Tuple     Tuple
+            st Unit      Unit
             tt Trait
-            un Union
-            bt u32
+            un Union     Union
+            bt u32       u32
             kw crate::
             kw self::
         "#]],
@@ -114,16 +114,16 @@ impl Record {
 }
 "#,
         expect![[r#"
-            en Enum
+            en Enum      Enum
             ma makro!(…) macro_rules! makro
             md module
-            sp Self
-            st Record
-            st Tuple
-            st Unit
+            sp Self      Record
+            st Record    Record
+            st Tuple     Tuple
+            st Unit      Unit
             tt Trait
-            un Union
-            bt u32
+            un Union     Union
+            bt u32       u32
             kw crate::
             kw self::
         "#]],
diff --git a/crates/ide-completion/src/tests/record.rs b/crates/ide-completion/src/tests/record.rs
index 65cefdb0856d2..18afde1b7cefd 100644
--- a/crates/ide-completion/src/tests/record.rs
+++ b/crates/ide-completion/src/tests/record.rs
@@ -186,10 +186,10 @@ fn main() {
             lc foo                  Foo
             lc thing                i32
             md core
-            st Foo
+            st Foo                  Foo
             st Foo {…}              Foo { foo1: u32, foo2: u32 }
             tt Default
-            bt u32
+            bt u32                  u32
             kw crate::
             kw self::
         "#]],
diff --git a/crates/ide-completion/src/tests/special.rs b/crates/ide-completion/src/tests/special.rs
index d3dbd7cc22777..fe5634712aaa5 100644
--- a/crates/ide-completion/src/tests/special.rs
+++ b/crates/ide-completion/src/tests/special.rs
@@ -84,10 +84,10 @@ pub mod prelude {
 }
 "#,
         expect![[r#"
-                md std
-                st Option
-                bt u32
-            "#]],
+            md std
+            st Option Option
+            bt u32    u32
+        "#]],
     );
 }
 
@@ -112,11 +112,11 @@ mod macros {
 }
 "#,
         expect![[r#"
-                fn f()        fn()
-                ma concat!(…) macro_rules! concat
-                md std
-                bt u32
-            "#]],
+            fn f()        fn()
+            ma concat!(…) macro_rules! concat
+            md std
+            bt u32        u32
+        "#]],
     );
 }
 
@@ -142,11 +142,11 @@ pub mod prelude {
 }
 "#,
         expect![[r#"
-                md core
-                md std
-                st String
-                bt u32
-            "#]],
+            md core
+            md std
+            st String String
+            bt u32    u32
+        "#]],
     );
 }
 
@@ -171,10 +171,10 @@ pub mod prelude {
 }
             "#,
         expect![[r#"
-                fn f() fn()
-                md std
-                bt u32
-            "#]],
+            fn f() fn()
+            md std
+            bt u32 u32
+        "#]],
     );
 }
 
@@ -446,10 +446,10 @@ mod p {
 }
 "#,
         expect![[r#"
-                ct RIGHT_CONST
-                fn right_fn()  fn()
-                st RightType
-            "#]],
+            ct RIGHT_CONST u32
+            fn right_fn()  fn()
+            st RightType   WrongType
+        "#]],
     );
 
     check_edit(
@@ -881,7 +881,7 @@ fn main() {
             fn main() fn()
             lc foobar i32
             ma x!(…)  macro_rules! x
-            bt u32
+            bt u32    u32
         "#]],
     )
 }
@@ -1008,8 +1008,8 @@ fn here_we_go() {
 "#,
         expect![[r#"
             fn here_we_go()    fn()
-            st Foo (alias Bar)
-            bt u32
+            st Foo (alias Bar) Foo
+            bt u32             u32
             kw const
             kw crate::
             kw enum
@@ -1057,8 +1057,8 @@ fn here_we_go() {
 "#,
         expect![[r#"
             fn here_we_go()           fn()
-            st Foo (alias Bar, Qux, Baz)
-            bt u32
+            st Foo (alias Bar, Qux, Baz) Foo
+            bt u32                    u32
             kw const
             kw crate::
             kw enum
@@ -1178,7 +1178,7 @@ fn bar() { qu$0 }
         expect![[r#"
             fn bar()             fn()
             fn foo() (alias qux) fn()
-            bt u32
+            bt u32               u32
             kw const
             kw crate::
             kw enum
@@ -1227,7 +1227,7 @@ fn here_we_go() {
 }
 "#,
         expect![[r#"
-            st Bar (alias Qux)
+            st Bar (alias Qux) Bar
         "#]],
     );
 }
@@ -1246,7 +1246,7 @@ fn here_we_go() {
 }
 "#,
         expect![[r#"
-            st Bar (alias Qux)
+            st Bar (alias Qux) Bar
         "#]],
     );
 }
@@ -1267,8 +1267,8 @@ fn here_we_go() {
         expect![[r#"
             fn here_we_go()           fn()
             md foo
-            st Bar (alias Qux) (use foo::Bar)
-            bt u32
+            st Bar (alias Qux) (use foo::Bar) Bar
+            bt u32                    u32
             kw crate::
             kw false
             kw for
@@ -1409,7 +1409,7 @@ fn foo() {
         Some('_'),
         expect![[r#"
             fn foo()       fn()
-            bt u32
+            bt u32         u32
             kw const
             kw crate::
             kw enum
@@ -1461,7 +1461,7 @@ fn foo(_: a_$0) { }
 "#,
         Some('_'),
         expect![[r#"
-            bt u32
+            bt u32     u32
             kw crate::
             kw self::
         "#]],
@@ -1475,7 +1475,7 @@ fn foo() {
         Some('_'),
         expect![[r#"
             tp T
-            bt u32
+            bt u32     u32
             kw crate::
             kw self::
         "#]],
diff --git a/crates/ide-completion/src/tests/type_pos.rs b/crates/ide-completion/src/tests/type_pos.rs
index d518dd7641020..c7161f82ce74f 100644
--- a/crates/ide-completion/src/tests/type_pos.rs
+++ b/crates/ide-completion/src/tests/type_pos.rs
@@ -17,18 +17,18 @@ struct Foo<'lt, T, const C: usize> {
 }
 "#,
         expect![[r#"
-            en Enum
+            en Enum      Enum
             ma makro!(…) macro_rules! makro
             md module
-            sp Self
-            st Foo<…>
-            st Record
-            st Tuple
-            st Unit
+            sp Self      Foo<'_, {unknown}, _>
+            st Foo<…>    Foo<'_, {unknown}, _>
+            st Record    Record
+            st Tuple     Tuple
+            st Unit      Unit
             tt Trait
             tp T
-            un Union
-            bt u32
+            un Union     Union
+            bt u32       u32
             kw crate::
             kw self::
         "#]],
@@ -42,18 +42,18 @@ fn tuple_struct_field() {
 struct Foo<'lt, T, const C: usize>(f$0);
 "#,
         expect![[r#"
-            en Enum
+            en Enum       Enum
             ma makro!(…)  macro_rules! makro
             md module
-            sp Self
-            st Foo<…>
-            st Record
-            st Tuple
-            st Unit
+            sp Self       Foo<'_, {unknown}, _>
+            st Foo<…>     Foo<'_, {unknown}, _>
+            st Record     Record
+            st Tuple      Tuple
+            st Unit       Unit
             tt Trait
             tp T
-            un Union
-            bt u32
+            un Union      Union
+            bt u32        u32
             kw crate::
             kw pub
             kw pub(crate)
@@ -70,16 +70,16 @@ fn fn_return_type() {
 fn x<'lt, T, const C: usize>() -> $0
 "#,
         expect![[r#"
-            en Enum
+            en Enum      Enum
             ma makro!(…) macro_rules! makro
             md module
-            st Record
-            st Tuple
-            st Unit
+            st Record    Record
+            st Tuple     Tuple
+            st Unit      Unit
             tt Trait
             tp T
-            un Union
-            bt u32
+            un Union     Union
+            bt u32       u32
             kw crate::
             kw self::
         "#]],
@@ -100,19 +100,19 @@ fn foo() -> B$0 {
 }
 "#,
         expect![[r#"
-        en Enum
-        ma makro!(…) macro_rules! makro
-        md module
-        st Record
-        st Tuple
-        st Unit
-        tt Trait
-        un Union
-        bt u32
-        it ()
-        kw crate::
-        kw self::
-    "#]],
+            en Enum      Enum
+            ma makro!(…) macro_rules! makro
+            md module
+            st Record    Record
+            st Tuple     Tuple
+            st Unit      Unit
+            tt Trait
+            un Union     Union
+            bt u32       u32
+            it ()
+            kw crate::
+            kw self::
+        "#]],
     )
 }
 
@@ -124,16 +124,16 @@ struct Foo(T);
 const FOO: $0 = Foo(2);
 "#,
         expect![[r#"
-            en Enum
+            en Enum      Enum
             ma makro!(…) macro_rules! makro
             md module
-            st Foo<…>
-            st Record
-            st Tuple
-            st Unit
+            st Foo<…>    Foo<{unknown}>
+            st Record    Record
+            st Tuple     Tuple
+            st Unit      Unit
             tt Trait
-            un Union
-            bt u32
+            un Union     Union
+            bt u32       u32
             it Foo
             kw crate::
             kw self::
@@ -151,15 +151,15 @@ fn f2() {
 }
 "#,
         expect![[r#"
-            en Enum
+            en Enum      Enum
             ma makro!(…) macro_rules! makro
             md module
-            st Record
-            st Tuple
-            st Unit
+            st Record    Record
+            st Tuple     Tuple
+            st Unit      Unit
             tt Trait
-            un Union
-            bt u32
+            un Union     Union
+            bt u32       u32
             it i32
             kw crate::
             kw self::
@@ -179,15 +179,15 @@ fn f2() {
 }
 "#,
         expect![[r#"
-            en Enum
+            en Enum      Enum
             ma makro!(…) macro_rules! makro
             md module
-            st Record
-            st Tuple
-            st Unit
+            st Record    Record
+            st Tuple     Tuple
+            st Unit      Unit
             tt Trait
-            un Union
-            bt u32
+            un Union     Union
+            bt u32       u32
             it u64
             kw crate::
             kw self::
@@ -204,15 +204,15 @@ fn f2(x: u64) -> $0 {
 }
 "#,
         expect![[r#"
-            en Enum
+            en Enum      Enum
             ma makro!(…) macro_rules! makro
             md module
-            st Record
-            st Tuple
-            st Unit
+            st Record    Record
+            st Tuple     Tuple
+            st Unit      Unit
             tt Trait
-            un Union
-            bt u32
+            un Union     Union
+            bt u32       u32
             it u64
             kw crate::
             kw self::
@@ -230,15 +230,15 @@ fn f2(x: $0) {
 }
 "#,
         expect![[r#"
-            en Enum
+            en Enum      Enum
             ma makro!(…) macro_rules! makro
             md module
-            st Record
-            st Tuple
-            st Unit
+            st Record    Record
+            st Tuple     Tuple
+            st Unit      Unit
             tt Trait
-            un Union
-            bt u32
+            un Union     Union
+            bt u32       u32
             it i32
             kw crate::
             kw self::
@@ -262,17 +262,17 @@ fn foo<'lt, T, const C: usize>() {
 }
 "#,
         expect![[r#"
-            en Enum
+            en Enum                Enum
             ma makro!(…)           macro_rules! makro
             md a
             md module
-            st Record
-            st Tuple
-            st Unit
+            st Record              Record
+            st Tuple               Tuple
+            st Unit                Unit
             tt Trait
             tp T
-            un Union
-            bt u32
+            un Union               Union
+            bt u32                 u32
             it a::Foo>
             kw crate::
             kw self::
@@ -291,17 +291,17 @@ fn foo<'lt, T, const C: usize>() {
 }
 "#,
         expect![[r#"
-            en Enum
+            en Enum      Enum
             ma makro!(…) macro_rules! makro
             md module
-            st Foo<…>
-            st Record
-            st Tuple
-            st Unit
+            st Foo<…>    Foo<{unknown}>
+            st Record    Record
+            st Tuple     Tuple
+            st Unit      Unit
             tt Trait
             tp T
-            un Union
-            bt u32
+            un Union     Union
+            bt u32       u32
             it Foo
             kw crate::
             kw self::
@@ -319,16 +319,16 @@ fn foo<'lt, T, const C: usize>() {
 }
 "#,
         expect![[r#"
-            en Enum
+            en Enum      Enum
             ma makro!(…) macro_rules! makro
             md module
-            st Record
-            st Tuple
-            st Unit
+            st Record    Record
+            st Tuple     Tuple
+            st Unit      Unit
             tt Trait
             tp T
-            un Union
-            bt u32
+            un Union     Union
+            bt u32       u32
             kw crate::
             kw self::
         "#]],
@@ -341,14 +341,14 @@ fn foo<'lt, T, const C: usize>() {
 }
 "#,
         expect![[r#"
-            en Enum
+            en Enum      Enum
             ma makro!(…) macro_rules! makro
             md module
-            st Record
-            st Tuple
-            st Unit
+            st Record    Record
+            st Tuple     Tuple
+            st Unit      Unit
             tt Trait
-            un Union
+            un Union     Union
         "#]],
     );
 }
@@ -384,18 +384,18 @@ trait Trait2: Trait1 {
 fn foo<'lt, T: Trait2<$0>, const CONST_PARAM: usize>(_: T) {}
 "#,
         expect![[r#"
-            en Enum
+            en Enum      Enum
             ma makro!(…) macro_rules! makro
             md module
-            st Record
-            st Tuple
-            st Unit
+            st Record    Record
+            st Tuple     Tuple
+            st Unit      Unit
             tt Trait
             tt Trait1
             tt Trait2
             tp T
-            un Union
-            bt u32
+            un Union     Union
+            bt u32       u32
             kw crate::
             kw self::
         "#]],
@@ -409,15 +409,15 @@ trait Trait2 {
 fn foo<'lt, T: Trait2, const CONST_PARAM: usize>(_: T) {}
     "#,
         expect![[r#"
-            en Enum
+            en Enum      Enum
             ma makro!(…) macro_rules! makro
             md module
-            st Record
-            st Tuple
-            st Unit
+            st Record    Record
+            st Tuple     Tuple
+            st Unit      Unit
             tt Trait
             tt Trait2
-            un Union
+            un Union     Union
         "#]],
     );
 }
@@ -434,18 +434,18 @@ trait Tr {
 impl Tr<$0
     "#,
         expect![[r#"
-            en Enum
+            en Enum      Enum
             ma makro!(…) macro_rules! makro
             md module
-            sp Self
-            st Record
-            st S
-            st Tuple
-            st Unit
+            sp Self      dyn Tr<{unknown}>
+            st Record    Record
+            st S         S
+            st Tuple     Tuple
+            st Unit      Unit
             tt Tr
             tt Trait
-            un Union
-            bt u32
+            un Union     Union
+            bt u32       u32
             kw crate::
             kw self::
         "#]],
@@ -481,16 +481,16 @@ trait MyTrait {
 fn f(t: impl MyTrait {
 fn f(t: impl MyTrait {
 fn f(t: impl MyTrait {
 fn f(t: impl MyTrait = ()>) {}
             "#,
         expect![[r#"
-                en Enum
-                ma makro!(…) macro_rules! makro
-                md module
-                st Foo
-                st Record
-                st Tuple
-                st Unit
-                tt Bar
-                tt Trait
-                un Union
-                bt u32
-                kw crate::
-                kw self::
-            "#]],
+            en Enum      Enum
+            ma makro!(…) macro_rules! makro
+            md module
+            st Foo       Foo
+            st Record    Record
+            st Tuple     Tuple
+            st Unit      Unit
+            tt Bar
+            tt Trait
+            un Union     Union
+            bt u32       u32
+            kw crate::
+            kw self::
+        "#]],
     );
     check(
         r#"
@@ -853,12 +853,12 @@ fn completes_const_and_type_generics_separately() {
     fn foo = ()>>() {}
             "#,
         expect![[r#"
-                ct CONST
-                ct X
-                ma makro!(…) macro_rules! makro
-                kw crate::
-                kw self::
-            "#]],
+            ct CONST     Unit
+            ct X         usize
+            ma makro!(…) macro_rules! makro
+            kw crate::
+            kw self::
+        "#]],
     );
 
     // Type generic params
@@ -871,12 +871,12 @@ fn completes_const_and_type_generics_separately() {
     }
             "#,
         expect![[r#"
-                ct CONST
-                ct X
-                ma makro!(…) macro_rules! makro
-                kw crate::
-                kw self::
-            "#]],
+            ct CONST     Unit
+            ct X         usize
+            ma makro!(…) macro_rules! makro
+            kw crate::
+            kw self::
+        "#]],
     );
 
     // Type alias generic params
@@ -890,12 +890,12 @@ fn completes_const_and_type_generics_separately() {
     }
             "#,
         expect![[r#"
-                ct CONST
-                ct X
-                ma makro!(…) macro_rules! makro
-                kw crate::
-                kw self::
-            "#]],
+            ct CONST     Unit
+            ct X         usize
+            ma makro!(…) macro_rules! makro
+            kw crate::
+            kw self::
+        "#]],
     );
 
     // Enum variant params
@@ -908,12 +908,12 @@ fn completes_const_and_type_generics_separately() {
     }
             "#,
         expect![[r#"
-                ct CONST
-                ct X
-                ma makro!(…) macro_rules! makro
-                kw crate::
-                kw self::
-            "#]],
+            ct CONST     Unit
+            ct X         usize
+            ma makro!(…) macro_rules! makro
+            kw crate::
+            kw self::
+        "#]],
     );
 
     // Trait params
@@ -924,12 +924,12 @@ fn completes_const_and_type_generics_separately() {
     impl Foo<(), $0> for () {}
             "#,
         expect![[r#"
-                ct CONST
-                ct X
-                ma makro!(…) macro_rules! makro
-                kw crate::
-                kw self::
-            "#]],
+            ct CONST     Unit
+            ct X         usize
+            ma makro!(…) macro_rules! makro
+            kw crate::
+            kw self::
+        "#]],
     );
 
     // Trait alias params
@@ -942,12 +942,12 @@ fn completes_const_and_type_generics_separately() {
     fn foo>() {}
             "#,
         expect![[r#"
-                ct CONST
-                ct X
-                ma makro!(…) macro_rules! makro
-                kw crate::
-                kw self::
-            "#]],
+            ct CONST     Unit
+            ct X         usize
+            ma makro!(…) macro_rules! makro
+            kw crate::
+            kw self::
+        "#]],
     );
 
     // Omitted lifetime params
@@ -957,7 +957,7 @@ struct S<'a, 'b, const C: usize, T>(core::marker::PhantomData<&'a &'b T>);
 fn foo<'a>() { S::; }
         "#,
         expect![[r#"
-            ct CONST
+            ct CONST     Unit
             ma makro!(…) macro_rules! makro
             kw crate::
             kw self::
@@ -970,7 +970,7 @@ struct S<'a, 'b, const C: usize, T>(core::marker::PhantomData<&'a &'b T>);
 fn foo<'a>() { S::<'static, 'static, F$0, _>; }
         "#,
         expect![[r#"
-            ct CONST
+            ct CONST     Unit
             ma makro!(…) macro_rules! makro
             kw crate::
             kw self::
diff --git a/crates/ide-completion/src/tests/use_tree.rs b/crates/ide-completion/src/tests/use_tree.rs
index 4c74dba526b6f..167bdec546d63 100644
--- a/crates/ide-completion/src/tests/use_tree.rs
+++ b/crates/ide-completion/src/tests/use_tree.rs
@@ -65,7 +65,7 @@ use self::{foo::*, bar$0};
 "#,
         expect![[r#"
             md foo
-            st S
+            st S   S
         "#]],
     );
 }
@@ -82,7 +82,7 @@ mod foo {
 use foo::{bar::$0}
 "#,
         expect![[r#"
-            st FooBar
+            st FooBar FooBar
         "#]],
     );
     check(
@@ -115,7 +115,7 @@ mod foo {
 use foo::{bar::{baz::$0}}
 "#,
         expect![[r#"
-            st FooBarBaz
+            st FooBarBaz FooBarBaz
         "#]],
     );
     check(
@@ -152,7 +152,7 @@ struct Bar;
 "#,
         expect![[r#"
             ma foo macro_rules! foo_
-            st Foo
+            st Foo Foo
         "#]],
     );
 }
@@ -193,7 +193,7 @@ struct Bar;
 "#,
         expect![[r#"
             md foo
-            st Bar
+            st Bar Bar
         "#]],
     );
 }
@@ -212,7 +212,7 @@ struct Bar;
         expect![[r#"
             md bar
             md foo
-            st Bar
+            st Bar Bar
         "#]],
     );
 }
@@ -230,7 +230,7 @@ mod a {
 }
 "#,
         expect![[r#"
-            ct A
+            ct A       usize
             md b
             kw super::
         "#]],
@@ -248,7 +248,7 @@ struct Bar;
 "#,
         expect![[r#"
             md foo
-            st Bar
+            st Bar Bar
         "#]],
     );
 }
@@ -265,7 +265,7 @@ pub mod foo {}
 "#,
         expect![[r#"
             md foo
-            st Foo
+            st Foo Foo
         "#]],
     );
 }
@@ -425,7 +425,7 @@ marco_rules! m { () => {} }
         expect![[r#"
             fn foo  fn()
             md simd
-            st S
+            st S    S
         "#]],
     );
 }

From 5d951a6a461b091c9d443d3d138905563868e141 Mon Sep 17 00:00:00 2001
From: Lukas Wirth 
Date: Fri, 8 Dec 2023 15:26:38 +0100
Subject: [PATCH 72/80] fix: Fix token downmapping being quadratic

---
 crates/hir-expand/src/lib.rs |   8 +-
 crates/hir/src/semantics.rs  | 287 +++++++++++++++++++----------------
 2 files changed, 164 insertions(+), 131 deletions(-)
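
The gist of the fix: instead of popping one token at a time off a single
stack and recomputing its file id and expansion info on every step, the new
code keeps a worklist of (file, tokens) batches and, once one token of a
macro input has been mapped down, drops its siblings that sit in the same
input range, since their mappings are identical. A minimal, self-contained
sketch of that shape, using hypothetical Tok/FileId stand-ins rather than
the real SyntaxToken/HirFileId types:

    #[derive(Clone, Copy)]
    struct Tok { range: (u32, u32) }
    type FileId = u32;

    // `expand` is one downmapping step: the macro file a token maps into, the
    // input range the macro call covers, and the tokens it produced.
    fn descend(
        seed: (FileId, Vec<Tok>),
        expand: impl Fn(FileId, Tok) -> Option<(FileId, (u32, u32), Vec<Tok>)>,
        mut visit: impl FnMut(FileId, Tok),
    ) {
        let mut stack = vec![seed];
        while let Some((file, mut tokens)) = stack.pop() {
            while let Some(tok) = tokens.pop() {
                match expand(file, tok) {
                    Some((mac_file, input, mapped)) => {
                        // All siblings inside the same macro input map identically,
                        // so retire them here instead of re-expanding each one.
                        tokens.retain(|t| !(input.0 <= t.range.0 && t.range.1 <= input.1));
                        stack.push((mac_file, mapped));
                    }
                    // Unmapped tokens are final results; hand them to the callback.
                    None => visit(file, tok),
                }
            }
        }
    }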

diff --git a/crates/hir-expand/src/lib.rs b/crates/hir-expand/src/lib.rs
index a159cf92a7ac3..74089593ac035 100644
--- a/crates/hir-expand/src/lib.rs
+++ b/crates/hir-expand/src/lib.rs
@@ -605,8 +605,8 @@ pub struct ExpansionInfo {
 }
 
 impl ExpansionInfo {
-    pub fn expanded(&self) -> InFile<SyntaxNode> {
-        self.expanded.clone().into()
+    pub fn expanded(&self) -> InMacroFile<SyntaxNode> {
+        self.expanded.clone()
     }
 
     pub fn call_node(&self) -> Option<InFile<SyntaxNode>> {
@@ -617,13 +617,13 @@ impl ExpansionInfo {
     pub fn map_range_down<'a>(
         &'a self,
         span: SpanData,
-    ) -> Option<impl Iterator<Item = InMacroFile<SyntaxToken>> + 'a> {
+    ) -> Option<InMacroFile<impl Iterator<Item = SyntaxToken> + 'a>> {
         let tokens = self
             .exp_map
             .ranges_with_span(span)
             .flat_map(move |range| self.expanded.value.covering_element(range).into_token());
 
-        Some(tokens.map(move |token| InMacroFile::new(self.expanded.file_id, token)))
+        Some(InMacroFile::new(self.expanded.file_id, tokens))
     }
 
     /// Looks up the span at the given offset.
diff --git a/crates/hir/src/semantics.rs b/crates/hir/src/semantics.rs
index 46835ec04e01b..49a63db19622d 100644
--- a/crates/hir/src/semantics.rs
+++ b/crates/hir/src/semantics.rs
@@ -5,7 +5,7 @@ mod source_to_def;
 use std::{
     cell::RefCell,
     fmt, iter, mem,
-    ops::{self, ControlFlow},
+    ops::{self, ControlFlow, Not},
 };
 
 use base_db::{FileId, FileRange};
@@ -20,8 +20,8 @@ use hir_def::{
     AsMacroCall, DefWithBodyId, FieldId, FunctionId, MacroId, TraitId, VariantId,
 };
 use hir_expand::{
-    db::ExpandDatabase, files::InRealFile, name::AsName, ExpansionInfo, MacroCallId, MacroFileId,
-    MacroFileIdExt,
+    db::ExpandDatabase, files::InRealFile, name::AsName, ExpansionInfo, InMacroFile, MacroCallId,
+    MacroFileId, MacroFileIdExt,
 };
 use itertools::Itertools;
 use rustc_hash::{FxHashMap, FxHashSet};
@@ -29,7 +29,7 @@ use smallvec::{smallvec, SmallVec};
 use stdx::TupleExt;
 use syntax::{
     algo::skip_trivia_token,
-    ast::{self, HasAttrs as _, HasGenericParams, HasLoopBody, IsString as _},
+    ast::{self, HasAttrs as _, HasDocComments, HasGenericParams, HasLoopBody, IsString as _},
     match_ast, AstNode, AstToken, Direction, SyntaxKind, SyntaxNode, SyntaxNodePtr, SyntaxToken,
     TextRange, TextSize,
 };
@@ -129,9 +129,10 @@ pub struct Semantics<'db, DB> {
 pub struct SemanticsImpl<'db> {
     pub db: &'db dyn HirDatabase,
     s2d_cache: RefCell<SourceToDefCache>,
-    expansion_info_cache: RefCell<FxHashMap<MacroFileId, ExpansionInfo>>,
     /// Rootnode to HirFileId cache
     cache: RefCell<FxHashMap<SyntaxNode, HirFileId>>,
+    // These 2 caches are mainly useful for semantic highlighting as nothing else descends a lot of tokens
+    expansion_info_cache: RefCell<FxHashMap<MacroFileId, ExpansionInfo>>,
     /// MacroCall to its expansion's MacroFileId cache
     macro_call_cache: RefCell<FxHashMap<InFile<ast::MacroCall>, MacroFileId>>,
 }
@@ -616,164 +617,196 @@ impl<'db> SemanticsImpl<'db> {
         res
     }
 
-    // FIXME: should only take real file inputs for simplicity
     fn descend_into_macros_impl(
         &self,
         token: SyntaxToken,
         f: &mut dyn FnMut(InFile<SyntaxToken>) -> ControlFlow<()>,
     ) {
-        // FIXME: Clean this up
         let _p = profile::span("descend_into_macros");
         let sa = match token.parent().and_then(|parent| self.analyze_no_infer(&parent)) {
             Some(it) => it,
             None => return,
         };
 
-        let mut cache = self.expansion_info_cache.borrow_mut();
-        let mut mcache = self.macro_call_cache.borrow_mut();
-        let span = match sa.file_id.repr() {
-            base_db::span::HirFileIdRepr::FileId(file_id) => {
-                self.db.real_span_map(file_id).span_for_range(token.text_range())
+        let span = match sa.file_id.file_id() {
+            Some(file_id) => self.db.real_span_map(file_id).span_for_range(token.text_range()),
+            None => {
+                stdx::never!();
+                return;
             }
-            base_db::span::HirFileIdRepr::MacroFile(macro_file) => cache
-                .entry(macro_file)
-                .or_insert_with(|| macro_file.expansion_info(self.db.upcast()))
-                .exp_map
-                .span_at(token.text_range().start()),
         };
 
+        let mut cache = self.expansion_info_cache.borrow_mut();
+        let mut mcache = self.macro_call_cache.borrow_mut();
         let def_map = sa.resolver.def_map();
-        let mut stack: SmallVec<[_; 4]> = smallvec![InFile::new(sa.file_id, token)];
 
-        let mut process_expansion_for_token = |stack: &mut SmallVec<_>, macro_file| {
+        let mut process_expansion_for_token = |stack: &mut Vec<_>, macro_file| {
             let expansion_info = cache
                 .entry(macro_file)
                 .or_insert_with(|| macro_file.expansion_info(self.db.upcast()));
 
             {
-                let InFile { file_id, value } = expansion_info.expanded();
-                self.cache(value, file_id);
+                let InMacroFile { file_id, value } = expansion_info.expanded();
+                self.cache(value, file_id.into());
             }
 
-            let mapped_tokens = expansion_info.map_range_down(span)?;
-            let len = stack.len();
+            let InMacroFile { file_id, value: mapped_tokens } =
+                expansion_info.map_range_down(span)?;
+            let mapped_tokens: SmallVec<[_; 2]> = mapped_tokens.collect();
 
-            // requeue the tokens we got from mapping our current token down
-            stack.extend(mapped_tokens.map(Into::into));
             // if the length changed we have found a mapping for the token
-            (stack.len() != len).then_some(())
+            let res = mapped_tokens.is_empty().not().then_some(());
+            // requeue the tokens we got from mapping our current token down
+            stack.push((HirFileId::from(file_id), mapped_tokens));
+            res
         };
 
-        // Remap the next token in the queue into a macro call its in, if it is not being remapped
-        // either due to not being in a macro-call or because its unused push it into the result vec,
-        // otherwise push the remapped tokens back into the queue as they can potentially be remapped again.
-        while let Some(token) = stack.pop() {
-            let was_not_remapped = (|| {
-                // First expand into attribute invocations
-
-                let containing_attribute_macro_call = self.with_ctx(|ctx| {
-                    token.value.parent_ancestors().filter_map(ast::Item::cast).find_map(|item| {
-                        if item.attrs().next().is_none() {
-                            // Don't force populate the dyn cache for items that don't have an attribute anyways
-                            return None;
-                        }
-                        Some(ctx.item_to_macro_call(token.with_value(item.clone()))?)
-                    })
-                });
-                if let Some(call_id) = containing_attribute_macro_call {
-                    let file_id = call_id.as_macro_file();
-                    return process_expansion_for_token(&mut stack, file_id);
-                }
+        let mut stack: Vec<(_, SmallVec<[_; 2]>)> = vec![(sa.file_id, smallvec![token])];
+
+        while let Some((file_id, mut tokens)) = stack.pop() {
+            while let Some(token) = tokens.pop() {
+                let was_not_remapped = (|| {
+                    // First expand into attribute invocations
+                    let containing_attribute_macro_call = self.with_ctx(|ctx| {
+                        token.parent_ancestors().filter_map(ast::Item::cast).find_map(|item| {
+                            if item.attrs().next().is_none() {
+                                // Don't force populate the dyn cache for items that don't have an attribute anyways
+                                return None;
+                            }
+                            Some((
+                                ctx.item_to_macro_call(InFile::new(file_id, item.clone()))?,
+                                item,
+                            ))
+                        })
+                    });
+                    if let Some((call_id, item)) = containing_attribute_macro_call {
+                        let file_id = call_id.as_macro_file();
+                        let attr_id = match self.db.lookup_intern_macro_call(call_id).kind {
+                            hir_expand::MacroCallKind::Attr { invoc_attr_index, .. } => {
+                                invoc_attr_index.ast_index()
+                            }
+                            _ => 0,
+                        };
+                        let text_range = item.syntax().text_range();
+                        let start = item
+                            .doc_comments_and_attrs()
+                            .nth(attr_id)
+                            .map(|attr| match attr {
+                                Either::Left(it) => it.syntax().text_range().start(),
+                                Either::Right(it) => it.syntax().text_range().start(),
+                            })
+                            .unwrap_or_else(|| text_range.start());
+                        let text_range = TextRange::new(start, text_range.end());
+                        // remove any other token in this macro input, all their mappings are the
+                        // same as this one
+                        tokens.retain(|t| !text_range.contains_range(t.text_range()));
+                        return process_expansion_for_token(&mut stack, file_id);
+                    }
 
-                // Then check for token trees, that means we are either in a function-like macro or
-                // secondary attribute inputs
-                let tt = token.value.parent_ancestors().map_while(ast::TokenTree::cast).last()?;
-                let parent = tt.syntax().parent()?;
+                    // Then check for token trees, that means we are either in a function-like macro or
+                    // secondary attribute inputs
+                    let tt = token.parent_ancestors().map_while(ast::TokenTree::cast).last()?;
+                    let parent = tt.syntax().parent()?;
 
-                if tt.left_delimiter_token().map_or(false, |it| it == token.value) {
-                    return None;
-                }
-                if tt.right_delimiter_token().map_or(false, |it| it == token.value) {
-                    return None;
-                }
+                    if tt.left_delimiter_token().map_or(false, |it| it == token) {
+                        return None;
+                    }
+                    if tt.right_delimiter_token().map_or(false, |it| it == token) {
+                        return None;
+                    }
 
-                if let Some(macro_call) = ast::MacroCall::cast(parent.clone()) {
-                    let mcall: hir_expand::files::InFileWrapper<HirFileId, ast::MacroCall> =
-                        token.with_value(macro_call);
-                    let file_id = match mcache.get(&mcall) {
-                        Some(&it) => it,
-                        None => {
-                            let it = sa.expand(self.db, mcall.as_ref())?;
-                            mcache.insert(mcall, it);
-                            it
-                        }
-                    };
-                    process_expansion_for_token(&mut stack, file_id)
-                } else if let Some(meta) = ast::Meta::cast(parent) {
-                    // attribute we failed expansion for earlier, this might be a derive invocation
-                    // or derive helper attribute
-                    let attr = meta.parent_attr()?;
-
-                    let adt = if let Some(adt) = attr.syntax().parent().and_then(ast::Adt::cast) {
-                        // this might be a derive, or a derive helper on an ADT
-                        let derive_call = self.with_ctx(|ctx| {
-                            // so try downmapping the token into the pseudo derive expansion
-                            // see [hir_expand::builtin_attr_macro] for how the pseudo derive expansion works
-                            ctx.attr_to_derive_macro_call(
-                                token.with_value(&adt),
-                                token.with_value(attr.clone()),
-                            )
-                            .map(|(_, call_id, _)| call_id)
-                        });
-
-                        match derive_call {
-                            Some(call_id) => {
-                                // resolved to a derive
-                                let file_id = call_id.as_macro_file();
-                                return process_expansion_for_token(&mut stack, file_id);
+                    if let Some(macro_call) = ast::MacroCall::cast(parent.clone()) {
+                        let mcall: hir_expand::files::InFileWrapper<HirFileId, ast::MacroCall> =
+                            InFile::new(file_id, macro_call);
+                        let file_id = match mcache.get(&mcall) {
+                            Some(&it) => it,
+                            None => {
+                                let it = sa.expand(self.db, mcall.as_ref())?;
+                                mcache.insert(mcall, it);
+                                it
                             }
-                            None => Some(adt),
-                        }
-                    } else {
-                        // Otherwise this could be a derive helper on a variant or field
-                        if let Some(field) = attr.syntax().parent().and_then(ast::RecordField::cast)
-                        {
-                            field.syntax().ancestors().take(4).find_map(ast::Adt::cast)
-                        } else if let Some(field) =
-                            attr.syntax().parent().and_then(ast::TupleField::cast)
-                        {
-                            field.syntax().ancestors().take(4).find_map(ast::Adt::cast)
-                        } else if let Some(variant) =
-                            attr.syntax().parent().and_then(ast::Variant::cast)
+                        };
+                        let text_range = tt.syntax().text_range();
+                        // remove any other token in this macro input, all their mappings are the
+                        // same as this one
+                        tokens.retain(|t| !text_range.contains_range(t.text_range()));
+                        process_expansion_for_token(&mut stack, file_id)
+                    } else if let Some(meta) = ast::Meta::cast(parent) {
+                        // attribute we failed expansion for earlier, this might be a derive invocation
+                        // or derive helper attribute
+                        let attr = meta.parent_attr()?;
+
+                        let adt = if let Some(adt) = attr.syntax().parent().and_then(ast::Adt::cast)
                         {
-                            variant.syntax().ancestors().nth(2).and_then(ast::Adt::cast)
+                            // this might be a derive, or a derive helper on an ADT
+                            let derive_call = self.with_ctx(|ctx| {
+                                // so try downmapping the token into the pseudo derive expansion
+                                // see [hir_expand::builtin_attr_macro] for how the pseudo derive expansion works
+                                ctx.attr_to_derive_macro_call(
+                                    InFile::new(file_id, &adt),
+                                    InFile::new(file_id, attr.clone()),
+                                )
+                                .map(|(_, call_id, _)| call_id)
+                            });
+
+                            match derive_call {
+                                Some(call_id) => {
+                                    // resolved to a derive
+                                    let file_id = call_id.as_macro_file();
+                                    let text_range = attr.syntax().text_range();
+                                    // remove any other token in this macro input, all their mappings are the
+                                    // same as this one
+                                    tokens.retain(|t| !text_range.contains_range(t.text_range()));
+                                    return process_expansion_for_token(&mut stack, file_id);
+                                }
+                                None => Some(adt),
+                            }
                         } else {
-                            None
+                            // Otherwise this could be a derive helper on a variant or field
+                            if let Some(field) =
+                                attr.syntax().parent().and_then(ast::RecordField::cast)
+                            {
+                                field.syntax().ancestors().take(4).find_map(ast::Adt::cast)
+                            } else if let Some(field) =
+                                attr.syntax().parent().and_then(ast::TupleField::cast)
+                            {
+                                field.syntax().ancestors().take(4).find_map(ast::Adt::cast)
+                            } else if let Some(variant) =
+                                attr.syntax().parent().and_then(ast::Variant::cast)
+                            {
+                                variant.syntax().ancestors().nth(2).and_then(ast::Adt::cast)
+                            } else {
+                                None
+                            }
+                        }?;
+                        if !self.with_ctx(|ctx| ctx.has_derives(InFile::new(file_id, &adt))) {
+                            return None;
                         }
-                    }?;
-                    if !self.with_ctx(|ctx| ctx.has_derives(InFile::new(token.file_id, &adt))) {
-                        return None;
-                    }
-                    // Not an attribute, nor a derive, so it's either a builtin or a derive helper
-                    // Try to resolve to a derive helper and downmap
-                    let attr_name = attr.path().and_then(|it| it.as_single_name_ref())?.as_name();
-                    let id = self.db.ast_id_map(token.file_id).ast_id(&adt);
-                    let helpers =
-                        def_map.derive_helpers_in_scope(InFile::new(token.file_id, id))?;
-                    let mut res = None;
-                    for (.., derive) in helpers.iter().filter(|(helper, ..)| *helper == attr_name) {
-                        res =
-                            res.or(process_expansion_for_token(&mut stack, derive.as_macro_file()));
+                        // Not an attribute, nor a derive, so it's either a builtin or a derive helper
+                        // Try to resolve to a derive helper and downmap
+                        let attr_name =
+                            attr.path().and_then(|it| it.as_single_name_ref())?.as_name();
+                        let id = self.db.ast_id_map(file_id).ast_id(&adt);
+                        let helpers = def_map.derive_helpers_in_scope(InFile::new(file_id, id))?;
+                        let mut res = None;
+                        for (.., derive) in
+                            helpers.iter().filter(|(helper, ..)| *helper == attr_name)
+                        {
+                            res = res.or(process_expansion_for_token(
+                                &mut stack,
+                                derive.as_macro_file(),
+                            ));
+                        }
+                        res
+                    } else {
+                        None
                     }
-                    res
-                } else {
-                    None
-                }
-            })()
-            .is_none();
+                })()
+                .is_none();
 
-            if was_not_remapped && f(token).is_break() {
-                break;
+                if was_not_remapped && f(InFile::new(file_id, token)).is_break() {
+                    break;
+                }
             }
         }
     }

From 35fbc0210c3372efea8913312ae1ca187c3fa23c Mon Sep 17 00:00:00 2001
From: Lukas Wirth 
Date: Fri, 8 Dec 2023 16:36:41 +0100
Subject: [PATCH 73/80] Fallback to method resolution on unresolved field
 access with matching method name

---
 crates/hir-ty/src/infer/expr.rs   | 95 ++++++++++++++++++++-----------
 crates/hir/src/semantics.rs       | 32 +++++------
 crates/hir/src/source_analyzer.rs | 47 +++++++++------
 crates/ide-db/src/defs.rs         | 11 ++--
 crates/ide/src/hover/tests.rs     | 27 +++++++++
 5 files changed, 140 insertions(+), 72 deletions(-)
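
In short: when `recv.name` resolves to no field, inference (and the
Semantics/SourceAnalyzer layers below it) now falls back to looking up a
method named `name`, and method-call resolution gains the symmetric field
fallback, so hover and go-to-definition keep working while the user is
still typing the call parentheses. A rough sketch of the fallback shape,
with placeholder Field/Function types and closures standing in for the
real hir lookups:

    struct Field;
    struct Function;
    enum Either<L, R> { Left(L), Right(R) }

    fn resolve_field_fallback(
        field_lookup: impl FnOnce() -> Option<Field>,
        method_lookup: impl FnOnce() -> Option<Function>,
    ) -> Option<Either<Field, Function>> {
        // Prefer the real field; otherwise fall back to a method of the same
        // name so `recv.foo` still resolves before the `()` is typed.
        match field_lookup() {
            Some(field) => Some(Either::Left(field)),
            None => method_lookup().map(Either::Right),
        }
    }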

diff --git a/crates/hir-ty/src/infer/expr.rs b/crates/hir-ty/src/infer/expr.rs
index 24026202b74d6..a5e77a12d8c50 100644
--- a/crates/hir-ty/src/infer/expr.rs
+++ b/crates/hir-ty/src/infer/expr.rs
@@ -579,7 +579,7 @@ impl InferenceContext<'_> {
                 }
                 ty
             }
-            Expr::Field { expr, name } => self.infer_field_access(tgt_expr, *expr, name),
+            Expr::Field { expr, name } => self.infer_field_access(tgt_expr, *expr, name, expected),
             Expr::Await { expr } => {
                 let inner_ty = self.infer_expr_inner(*expr, &Expectation::none());
                 self.resolve_associated_type(inner_ty, self.resolve_future_future_output())
@@ -1456,7 +1456,13 @@ impl InferenceContext<'_> {
         })
     }
 
-    fn infer_field_access(&mut self, tgt_expr: ExprId, receiver: ExprId, name: &Name) -> Ty {
+    fn infer_field_access(
+        &mut self,
+        tgt_expr: ExprId,
+        receiver: ExprId,
+        name: &Name,
+        expected: &Expectation,
+    ) -> Ty {
         let receiver_ty = self.infer_expr_inner(receiver, &Expectation::none());
 
         if name.is_missing() {
@@ -1482,28 +1488,42 @@ impl InferenceContext<'_> {
                 ty
             }
             None => {
-                // no field found,
-                let method_with_same_name_exists = {
-                    self.get_traits_in_scope();
-
-                    let canonicalized_receiver = self.canonicalize(receiver_ty.clone());
-                    method_resolution::lookup_method(
-                        self.db,
-                        &canonicalized_receiver.value,
-                        self.table.trait_env.clone(),
-                        self.get_traits_in_scope().as_ref().left_or_else(|&it| it),
-                        VisibleFromModule::Filter(self.resolver.module()),
-                        name,
-                    )
-                    .is_some()
-                };
+                // no field found, let's attempt to resolve it like a function so that IDE things
+                // work out while people are typing
+                let canonicalized_receiver = self.canonicalize(receiver_ty.clone());
+                let resolved = method_resolution::lookup_method(
+                    self.db,
+                    &canonicalized_receiver.value,
+                    self.table.trait_env.clone(),
+                    self.get_traits_in_scope().as_ref().left_or_else(|&it| it),
+                    VisibleFromModule::Filter(self.resolver.module()),
+                    name,
+                );
                 self.result.diagnostics.push(InferenceDiagnostic::UnresolvedField {
                     expr: tgt_expr,
-                    receiver: receiver_ty,
+                    receiver: receiver_ty.clone(),
                     name: name.clone(),
-                    method_with_same_name_exists,
+                    method_with_same_name_exists: resolved.is_some(),
                 });
-                self.err_ty()
+                match resolved {
+                    Some((adjust, func, _)) => {
+                        let (ty, adjustments) = adjust.apply(&mut self.table, receiver_ty);
+                        let generics = generics(self.db.upcast(), func.into());
+                        let substs = self.substs_for_method_call(generics, None);
+                        self.write_expr_adj(receiver, adjustments);
+                        self.write_method_resolution(tgt_expr, func, substs.clone());
+
+                        self.check_method_call(
+                            tgt_expr,
+                            &[],
+                            self.db.value_ty(func.into()),
+                            substs,
+                            ty,
+                            expected,
+                        )
+                    }
+                    None => self.err_ty(),
+                }
             }
         }
     }
@@ -1517,7 +1537,7 @@ impl InferenceContext<'_> {
         generic_args: Option<&GenericArgs>,
         expected: &Expectation,
     ) -> Ty {
-        let receiver_ty = self.infer_expr(receiver, &Expectation::none());
+        let receiver_ty = self.infer_expr_inner(receiver, &Expectation::none());
         let canonicalized_receiver = self.canonicalize(receiver_ty.clone());
 
         let resolved = method_resolution::lookup_method(
@@ -1568,23 +1588,32 @@ impl InferenceContext<'_> {
                 )
             }
         };
+        self.check_method_call(tgt_expr, args, method_ty, substs, receiver_ty, expected)
+    }
+
+    fn check_method_call(
+        &mut self,
+        tgt_expr: ExprId,
+        args: &[ExprId],
+        method_ty: Binders<Ty>,
+        substs: Substitution,
+        receiver_ty: Ty,
+        expected: &Expectation,
+    ) -> Ty {
         let method_ty = method_ty.substitute(Interner, &substs);
         self.register_obligations_for_call(&method_ty);
-        let (formal_receiver_ty, param_tys, ret_ty, is_varargs) =
+        let ((formal_receiver_ty, param_tys), ret_ty, is_varargs) =
             match method_ty.callable_sig(self.db) {
-                Some(sig) => {
+                Some(sig) => (
                     if !sig.params().is_empty() {
-                        (
-                            sig.params()[0].clone(),
-                            sig.params()[1..].to_vec(),
-                            sig.ret().clone(),
-                            sig.is_varargs,
-                        )
+                        (sig.params()[0].clone(), sig.params()[1..].to_vec())
                     } else {
-                        (self.err_ty(), Vec::new(), sig.ret().clone(), sig.is_varargs)
-                    }
-                }
-                None => (self.err_ty(), Vec::new(), self.err_ty(), true),
+                        (self.err_ty(), Vec::new())
+                    },
+                    sig.ret().clone(),
+                    sig.is_varargs,
+                ),
+                None => ((self.err_ty(), Vec::new()), self.err_ty(), true),
             };
         self.unify(&formal_receiver_ty, &receiver_ty);
 
diff --git a/crates/hir/src/semantics.rs b/crates/hir/src/semantics.rs
index cb52a71d9675a..92fa76c96fbdb 100644
--- a/crates/hir/src/semantics.rs
+++ b/crates/hir/src/semantics.rs
@@ -17,7 +17,7 @@ use hir_def::{
     nameres::MacroSubNs,
     resolver::{self, HasResolver, Resolver, TypeNs},
     type_ref::Mutability,
-    AsMacroCall, DefWithBodyId, FieldId, FunctionId, MacroId, TraitId, VariantId,
+    AsMacroCall, DefWithBodyId, FunctionId, MacroId, TraitId, VariantId,
 };
 use hir_expand::{
     db::ExpandDatabase, files::InRealFile, name::AsName, ExpansionInfo, InMacroFile, MacroCallId,
@@ -198,20 +198,6 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
         self.imp.descend_node_at_offset(node, offset).filter_map(|mut it| it.find_map(N::cast))
     }
 
-    pub fn resolve_method_call(&self, call: &ast::MethodCallExpr) -> Option<Function> {
-        self.imp.resolve_method_call(call).map(Function::from)
-    }
-
-    /// Attempts to resolve this call expression as a method call falling back to resolving it as a field.
-    pub fn resolve_method_call_field_fallback(
-        &self,
-        call: &ast::MethodCallExpr,
-    ) -> Option<Either<Function, Field>> {
-        self.imp
-            .resolve_method_call_fallback(call)
-            .map(|it| it.map_left(Function::from).map_right(Field::from))
-    }
-
     pub fn resolve_await_to_poll(&self, await_expr: &ast::AwaitExpr) -> Option<Function> {
         self.imp.resolve_await_to_poll(await_expr).map(Function::from)
     }
@@ -1048,14 +1034,15 @@ impl<'db> SemanticsImpl<'db> {
         self.analyze(pat.syntax())?.binding_mode_of_pat(self.db, pat)
     }
 
-    fn resolve_method_call(&self, call: &ast::MethodCallExpr) -> Option<FunctionId> {
+    pub fn resolve_method_call(&self, call: &ast::MethodCallExpr) -> Option<Function> {
         self.analyze(call.syntax())?.resolve_method_call(self.db, call)
     }
 
-    fn resolve_method_call_fallback(
+    /// Attempts to resolve this call expression as a method call falling back to resolving it as a field.
+    pub fn resolve_method_call_fallback(
         &self,
         call: &ast::MethodCallExpr,
-    ) -> Option<Either<FunctionId, FieldId>> {
+    ) -> Option<Either<Function, Field>> {
         self.analyze(call.syntax())?.resolve_method_call_fallback(self.db, call)
     }
 
@@ -1087,6 +1074,13 @@ impl<'db> SemanticsImpl<'db> {
         self.analyze(field.syntax())?.resolve_field(self.db, field)
     }
 
+    pub fn resolve_field_fallback(
+        &self,
+        field: &ast::FieldExpr,
+    ) -> Option<Either<Field, Function>> {
+        self.analyze(field.syntax())?.resolve_field_fallback(self.db, field)
+    }
+
     pub fn resolve_record_field(
         &self,
         field: &ast::RecordExprField,
@@ -1298,7 +1292,7 @@ impl<'db> SemanticsImpl<'db> {
                     return None;
                 }
 
-                let func = self.resolve_method_call(method_call_expr).map(Function::from)?;
+                let func = self.resolve_method_call(method_call_expr)?;
                 let res = match func.self_param(self.db)?.access(self.db) {
                     Access::Shared | Access::Exclusive => true,
                     Access::Owned => false,
diff --git a/crates/hir/src/source_analyzer.rs b/crates/hir/src/source_analyzer.rs
index 7a31e6df1faee..73db6f8f0b86b 100644
--- a/crates/hir/src/source_analyzer.rs
+++ b/crates/hir/src/source_analyzer.rs
@@ -280,25 +280,49 @@ impl SourceAnalyzer {
         &self,
         db: &dyn HirDatabase,
         call: &ast::MethodCallExpr,
-    ) -> Option<FunctionId> {
+    ) -> Option<Function> {
         let expr_id = self.expr_id(db, &call.clone().into())?;
         let (f_in_trait, substs) = self.infer.as_ref()?.method_resolution(expr_id)?;
 
-        Some(self.resolve_impl_method_or_trait_def(db, f_in_trait, substs))
+        Some(self.resolve_impl_method_or_trait_def(db, f_in_trait, substs).into())
     }
 
     pub(crate) fn resolve_method_call_fallback(
         &self,
         db: &dyn HirDatabase,
         call: &ast::MethodCallExpr,
-    ) -> Option<Either<FunctionId, FieldId>> {
+    ) -> Option<Either<Function, Field>> {
         let expr_id = self.expr_id(db, &call.clone().into())?;
         let inference_result = self.infer.as_ref()?;
         match inference_result.method_resolution(expr_id) {
-            Some((f_in_trait, substs)) => {
-                Some(Either::Left(self.resolve_impl_method_or_trait_def(db, f_in_trait, substs)))
-            }
-            None => inference_result.field_resolution(expr_id).map(Either::Right),
+            Some((f_in_trait, substs)) => Some(Either::Left(
+                self.resolve_impl_method_or_trait_def(db, f_in_trait, substs).into(),
+            )),
+            None => inference_result.field_resolution(expr_id).map(Into::into).map(Either::Right),
+        }
+    }
+
+    pub(crate) fn resolve_field(
+        &self,
+        db: &dyn HirDatabase,
+        field: &ast::FieldExpr,
+    ) -> Option<Field> {
+        let expr_id = self.expr_id(db, &field.clone().into())?;
+        self.infer.as_ref()?.field_resolution(expr_id).map(|it| it.into())
+    }
+
+    pub(crate) fn resolve_field_fallback(
+        &self,
+        db: &dyn HirDatabase,
+        field: &ast::FieldExpr,
+    ) -> Option<Either<Field, Function>> {
+        let expr_id = self.expr_id(db, &field.clone().into())?;
+        let inference_result = self.infer.as_ref()?;
+        match inference_result.field_resolution(expr_id) {
+            Some(field) => Some(Either::Left(field.into())),
+            None => inference_result.method_resolution(expr_id).map(|(f, substs)| {
+                Either::Right(self.resolve_impl_method_or_trait_def(db, f, substs).into())
+            }),
         }
     }
 
@@ -417,15 +441,6 @@ impl SourceAnalyzer {
         Some(self.resolve_impl_method_or_trait_def(db, op_fn, substs))
     }
 
-    pub(crate) fn resolve_field(
-        &self,
-        db: &dyn HirDatabase,
-        field: &ast::FieldExpr,
-    ) -> Option<Field> {
-        let expr_id = self.expr_id(db, &field.clone().into())?;
-        self.infer.as_ref()?.field_resolution(expr_id).map(|it| it.into())
-    }
-
     pub(crate) fn resolve_record_field(
         &self,
         db: &dyn HirDatabase,
diff --git a/crates/ide-db/src/defs.rs b/crates/ide-db/src/defs.rs
index ef72fc3861a7f..ded5d4e3db534 100644
--- a/crates/ide-db/src/defs.rs
+++ b/crates/ide-db/src/defs.rs
@@ -492,7 +492,7 @@ impl NameRefClass {
         match_ast! {
             match parent {
                 ast::MethodCallExpr(method_call) => {
-                    sema.resolve_method_call_field_fallback(&method_call)
+                    sema.resolve_method_call_fallback(&method_call)
                         .map(|it| {
                             it.map_left(Definition::Function)
                                 .map_right(Definition::Field)
@@ -500,9 +500,12 @@ impl NameRefClass {
                         })
                 },
                 ast::FieldExpr(field_expr) => {
-                    sema.resolve_field(&field_expr)
-                        .map(Definition::Field)
-                        .map(NameRefClass::Definition)
+                    sema.resolve_field_fallback(&field_expr)
+                    .map(|it| {
+                        it.map_left(Definition::Field)
+                            .map_right(Definition::Function)
+                            .either(NameRefClass::Definition, NameRefClass::Definition)
+                    })
                 },
                 ast::RecordPatField(record_pat_field) => {
                     sema.resolve_record_pat_field(&record_pat_field)
diff --git a/crates/ide/src/hover/tests.rs b/crates/ide/src/hover/tests.rs
index 8c9d58671e698..d5ec336fc7ed7 100644
--- a/crates/ide/src/hover/tests.rs
+++ b/crates/ide/src/hover/tests.rs
@@ -6698,3 +6698,30 @@ foo!(r"{$0aaaaa}");
         "#]],
     );
 }
+
+#[test]
+fn method_call_without_parens() {
+    check(
+        r#"
+struct S;
+impl S {
+    fn foo<T>(&self, t: T) {}
+}
+
+fn main() {
+    S.foo$0;
+}
+"#,
+        expect![[r#"
+            *foo*
+
+            ```rust
+            test::S
+            ```
+
+            ```rust
+            fn foo<T>(&self, t: T)
+            ```
+        "#]],
+    );
+}

From b1a8f83a0ca07a89f2461d93bea2a4b25236f3d8 Mon Sep 17 00:00:00 2001
From: Lukas Wirth 
Date: Fri, 8 Dec 2023 18:46:36 +0100
Subject: [PATCH 74/80] fix: Smaller spans for unresolved field and method
 diagnostics

---
 Cargo.lock                                    |  4 +-
 crates/hir-expand/src/files.rs                | 15 +++++
 .../src/handlers/unresolved_field.rs          | 25 ++++++---
 .../src/handlers/unresolved_method.rs         | 55 +++++++++++++++++--
 crates/ide-diagnostics/src/lib.rs             | 15 ++++-
 crates/syntax/Cargo.toml                      |  2 +-
 crates/syntax/src/ptr.rs                      |  1 +
 7 files changed, 99 insertions(+), 18 deletions(-)
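
These diagnostics used to underline the entire expression; they now narrow
the range to just the offending name token, falling back to the whole node
when the expression has an unexpected shape, and then map that range back
to the original file. A small illustrative sketch of the narrowing step,
using hypothetical Range/Node types in place of the real AST:

    #[derive(Clone, Copy, PartialEq, Debug)]
    struct Range { start: u32, end: u32 }
    struct Node { full: Range, name: Option<Range> }

    // Shrink the diagnostic to a sub-range (e.g. the name token) when the
    // adjuster recognizes the node; otherwise keep the full range.
    fn adjusted_display_range(node: &Node, adj: &dyn Fn(&Node) -> Option<Range>) -> Range {
        adj(node).unwrap_or(node.full)
    }

    fn main() {
        // `().foo` spans 0..6, but only `foo` (3..6) gets underlined.
        let n = Node { full: Range { start: 0, end: 6 }, name: Some(Range { start: 3, end: 6 }) };
        assert_eq!(adjusted_display_range(&n, &|n| n.name), Range { start: 3, end: 6 });
    }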

diff --git a/Cargo.lock b/Cargo.lock
index c6c1e1e3c9685..46efbdd93c97f 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -1484,9 +1484,9 @@ dependencies = [
 
 [[package]]
 name = "rowan"
-version = "0.15.14"
+version = "0.15.15"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a9672ea408d491b517a4dc370159ec6dd7cb5c5fd2f41b02883830339109ac76"
+checksum = "32a58fa8a7ccff2aec4f39cc45bf5f985cec7125ab271cf681c279fd00192b49"
 dependencies = [
  "countme",
  "hashbrown",
diff --git a/crates/hir-expand/src/files.rs b/crates/hir-expand/src/files.rs
index 7e55f6be1e412..89f0685d5b679 100644
--- a/crates/hir-expand/src/files.rs
+++ b/crates/hir-expand/src/files.rs
@@ -314,6 +314,21 @@ impl InFile<TextRange> {
         }
     }
 
+    pub fn original_node_file_range_rooted(self, db: &dyn db::ExpandDatabase) -> FileRange {
+        match self.file_id.repr() {
+            HirFileIdRepr::FileId(file_id) => FileRange { file_id, range: self.value },
+            HirFileIdRepr::MacroFile(mac_file) => {
+                match ExpansionInfo::new(db, mac_file).map_node_range_up(db, self.value) {
+                    Some((it, SyntaxContextId::ROOT)) => it,
+                    _ => {
+                        let loc = db.lookup_intern_macro_call(mac_file.macro_call_id);
+                        loc.kind.original_call_range(db)
+                    }
+                }
+            }
+        }
+    }
+
     pub fn original_node_file_range_opt(
         self,
         db: &dyn db::ExpandDatabase,
diff --git a/crates/ide-diagnostics/src/handlers/unresolved_field.rs b/crates/ide-diagnostics/src/handlers/unresolved_field.rs
index 0758706e45a27..321459412182f 100644
--- a/crates/ide-diagnostics/src/handlers/unresolved_field.rs
+++ b/crates/ide-diagnostics/src/handlers/unresolved_field.rs
@@ -8,7 +8,7 @@ use ide_db::{
 use syntax::{ast, AstNode, AstPtr};
 use text_edit::TextEdit;
 
-use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext};
+use crate::{adjusted_display_range_new, Diagnostic, DiagnosticCode, DiagnosticsContext};
 
 // Diagnostic: unresolved-field
 //
@@ -22,15 +22,24 @@ pub(crate) fn unresolved_field(
     } else {
         ""
     };
-    Diagnostic::new_with_syntax_node_ptr(
-        ctx,
+    Diagnostic::new(
         DiagnosticCode::RustcHardError("E0559"),
         format!(
             "no field `{}` on type `{}`{method_suffix}",
             d.name.display(ctx.sema.db),
             d.receiver.display(ctx.sema.db)
         ),
-        d.expr.clone().map(|it| it.into()),
+        adjusted_display_range_new(ctx, d.expr, &|expr| {
+            Some(
+                match expr {
+                    ast::Expr::MethodCallExpr(it) => it.name_ref(),
+                    ast::Expr::FieldExpr(it) => it.name_ref(),
+                    _ => None,
+                }?
+                .syntax()
+                .text_range(),
+            )
+        }),
     )
     .with_fixes(fixes(ctx, d))
     .experimental()
@@ -79,7 +88,7 @@ mod tests {
             r#"
 fn main() {
     ().foo;
- // ^^^^^^ error: no field `foo` on type `()`
+    // ^^^ error: no field `foo` on type `()`
 }
 "#,
         );
@@ -95,7 +104,7 @@ impl Foo {
 }
 fn foo() {
     Foo.bar;
- // ^^^^^^^ 💡 error: no field `bar` on type `Foo`, but a method with a similar name exists
+     // ^^^ 💡 error: no field `bar` on type `Foo`, but a method with a similar name exists
 }
 "#,
         );
@@ -112,7 +121,7 @@ trait Bar {
 impl Bar for Foo {}
 fn foo() {
     Foo.bar;
- // ^^^^^^^ 💡 error: no field `bar` on type `Foo`, but a method with a similar name exists
+     // ^^^ 💡 error: no field `bar` on type `Foo`, but a method with a similar name exists
 }
 "#,
         );
@@ -131,7 +140,7 @@ impl Bar for Foo {
 }
 fn foo() {
     Foo.bar;
- // ^^^^^^^ 💡 error: no field `bar` on type `Foo`, but a method with a similar name exists
+     // ^^^ 💡 error: no field `bar` on type `Foo`, but a method with a similar name exists
 }
 "#,
         );
diff --git a/crates/ide-diagnostics/src/handlers/unresolved_method.rs b/crates/ide-diagnostics/src/handlers/unresolved_method.rs
index ae9f6744c40f9..464b0a710ea7b 100644
--- a/crates/ide-diagnostics/src/handlers/unresolved_method.rs
+++ b/crates/ide-diagnostics/src/handlers/unresolved_method.rs
@@ -8,7 +8,7 @@ use ide_db::{
 use syntax::{ast, AstNode, TextRange};
 use text_edit::TextEdit;
 
-use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext};
+use crate::{adjusted_display_range_new, Diagnostic, DiagnosticCode, DiagnosticsContext};
 
 // Diagnostic: unresolved-method
 //
@@ -22,15 +22,24 @@ pub(crate) fn unresolved_method(
     } else {
         ""
     };
-    Diagnostic::new_with_syntax_node_ptr(
-        ctx,
+    Diagnostic::new(
         DiagnosticCode::RustcHardError("E0599"),
         format!(
             "no method `{}` on type `{}`{field_suffix}",
             d.name.display(ctx.sema.db),
             d.receiver.display(ctx.sema.db)
         ),
-        d.expr.clone().map(|it| it.into()),
+        adjusted_display_range_new(ctx, d.expr, &|expr| {
+            Some(
+                match expr {
+                    ast::Expr::MethodCallExpr(it) => it.name_ref(),
+                    ast::Expr::FieldExpr(it) => it.name_ref(),
+                    _ => None,
+                }?
+                .syntax()
+                .text_range(),
+            )
+        }),
     )
     .with_fixes(fixes(ctx, d))
     .experimental()
@@ -92,7 +101,41 @@ mod tests {
             r#"
 fn main() {
     ().foo();
- // ^^^^^^^^ error: no method `foo` on type `()`
+    // ^^^ error: no method `foo` on type `()`
+}
+"#,
+        );
+    }
+
+    #[test]
+    fn smoke_test_in_macro_def_site() {
+        check_diagnostics(
+            r#"
+macro_rules! m {
+    ($rcv:expr) => {
+        $rcv.foo()
+    }
+}
+fn main() {
+    m!(());
+ // ^^^^^^ error: no method `foo` on type `()`
+}
+"#,
+        );
+    }
+
+    #[test]
+    fn smoke_test_in_macro_call_site() {
+        check_diagnostics(
+            r#"
+macro_rules! m {
+    ($ident:ident) => {
+        ().$ident()
+    }
+}
+fn main() {
+    m!(foo);
+    // ^^^ error: no method `foo` on type `()`
 }
 "#,
         );
@@ -105,7 +148,7 @@ fn main() {
 struct Foo { bar: i32 }
 fn foo() {
     Foo { bar: i32 }.bar();
- // ^^^^^^^^^^^^^^^^^^^^^^ error: no method `bar` on type `Foo`, but a field with a similar name exists
+                  // ^^^ error: no method `bar` on type `Foo`, but a field with a similar name exists
 }
 "#,
         );
diff --git a/crates/ide-diagnostics/src/lib.rs b/crates/ide-diagnostics/src/lib.rs
index 6cfd5f1832084..6541bf605794a 100644
--- a/crates/ide-diagnostics/src/lib.rs
+++ b/crates/ide-diagnostics/src/lib.rs
@@ -90,7 +90,7 @@ use stdx::never;
 use syntax::{
     algo::find_node_at_range,
     ast::{self, AstNode},
-    SyntaxNode, SyntaxNodePtr, TextRange,
+    AstPtr, SyntaxNode, SyntaxNodePtr, TextRange,
 };
 
 // FIXME: Make this an enum
@@ -584,3 +584,16 @@ fn adjusted_display_range(
             .unwrap_or(range),
     }
 }
+
+// FIXME Replace the one above with this one?
+fn adjusted_display_range_new<N: AstNode>(
+    ctx: &DiagnosticsContext<'_>,
+    diag_ptr: InFile<AstPtr<N>>,
+    adj: &dyn Fn(N) -> Option<TextRange>,
+) -> FileRange {
+    let source_file = ctx.sema.parse_or_expand(diag_ptr.file_id);
+    let node = diag_ptr.value.to_node(&source_file);
+    diag_ptr
+        .with_value(adj(node).unwrap_or_else(|| diag_ptr.value.text_range()))
+        .original_node_file_range_rooted(ctx.sema.db)
+}
diff --git a/crates/syntax/Cargo.toml b/crates/syntax/Cargo.toml
index 3b55921dc759e..7a7c0d267fede 100644
--- a/crates/syntax/Cargo.toml
+++ b/crates/syntax/Cargo.toml
@@ -16,7 +16,7 @@ doctest = false
 cov-mark = "2.0.0-pre.1"
 either.workspace = true
 itertools.workspace = true
-rowan = "0.15.11"
+rowan = "0.15.15"
 rustc-hash = "1.1.0"
 once_cell = "1.17.0"
 indexmap.workspace = true
diff --git a/crates/syntax/src/ptr.rs b/crates/syntax/src/ptr.rs
index 07641b203bb86..8750147ee11af 100644
--- a/crates/syntax/src/ptr.rs
+++ b/crates/syntax/src/ptr.rs
@@ -33,6 +33,7 @@ impl std::fmt::Debug for AstPtr {
     }
 }
 
+impl<N: AstNode> Copy for AstPtr<N> {}
 impl<N: AstNode> Clone for AstPtr<N> {
     fn clone(&self) -> AstPtr<N> {
         AstPtr { raw: self.raw.clone(), _ty: PhantomData }

From cf083fefc4ceb508ef34f02d189a24039f3457ef Mon Sep 17 00:00:00 2001
From: Lukas Wirth 
Date: Fri, 8 Dec 2023 20:13:52 +0100
Subject: [PATCH 75/80] fix: Fix completion failing in format_args! with
 invalid template

---
 crates/hir-def/src/body/lower.rs              |  14 +-
 crates/hir-def/src/hir/format_args.rs         |  14 +-
 crates/ide-completion/src/tests/expression.rs | 154 ++++++++++++++++++
 crates/ide/src/view_hir.rs                    |   6 +-
 crates/ide/src/view_mir.rs                    |   6 +-
 crates/rust-analyzer/tests/slow-tests/tidy.rs |   1 +
 6 files changed, 187 insertions(+), 8 deletions(-)
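
The underlying problem: when the template string fails to parse, the
lowering built a FormatArgs with an empty template and simply dropped the
argument expressions, so type inference never saw them and completion
inside them had nothing to work with. The fix threads the unused
expressions through as "orphans" and parks them as statements in the
desugared unsafe block. A schematic of that change, with simplified
Expr/Statement types (not the real hir ones):

    enum Expr {
        Unsafe { statements: Vec<Statement>, tail: Option<Box<Expr>> },
        Unit,
    }
    struct Statement { expr: Expr, has_semi: bool }

    fn desugar(orphans: Vec<Expr>, tail: Expr) -> Expr {
        Expr::Unsafe {
            // Before the fix this was an empty list and the orphans were lost;
            // keeping them as statements lets inference still visit them.
            statements: orphans
                .into_iter()
                .map(|expr| Statement { expr, has_semi: true })
                .collect(),
            tail: Some(Box::new(tail)),
        }
    }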

diff --git a/crates/hir-def/src/body/lower.rs b/crates/hir-def/src/body/lower.rs
index c22bd6e2e57cf..c6a9093201592 100644
--- a/crates/hir-def/src/body/lower.rs
+++ b/crates/hir-def/src/body/lower.rs
@@ -1611,7 +1611,11 @@ impl ExprCollector<'_> {
                     }
                 },
             ),
-            None => FormatArgs { template: Default::default(), arguments: args.finish() },
+            None => FormatArgs {
+                template: Default::default(),
+                arguments: args.finish(),
+                orphans: Default::default(),
+            },
         };
 
         // Create a list of all _unique_ (argument, format trait) combinations.
@@ -1750,7 +1754,13 @@ impl ExprCollector<'_> {
         });
         let unsafe_arg_new = self.alloc_expr_desugared(Expr::Unsafe {
             id: None,
-            statements: Box::default(),
+            // We collect the unused expressions here so that we still infer them instead of
+            // dropping them out of the expression tree
+            statements: fmt
+                .orphans
+                .into_iter()
+                .map(|expr| Statement::Expr { expr, has_semi: true })
+                .collect(),
             tail: Some(unsafe_arg_new),
         });
 
diff --git a/crates/hir-def/src/hir/format_args.rs b/crates/hir-def/src/hir/format_args.rs
index 068abb27a25cf..7fc33abc7c9a1 100644
--- a/crates/hir-def/src/hir/format_args.rs
+++ b/crates/hir-def/src/hir/format_args.rs
@@ -3,6 +3,7 @@ use std::mem;
 
 use hir_expand::name::Name;
 use rustc_dependencies::parse_format as parse;
+use stdx::TupleExt;
 use syntax::{
     ast::{self, IsString},
     SmolStr, TextRange, TextSize,
@@ -14,6 +15,7 @@ use crate::hir::ExprId;
 pub struct FormatArgs {
     pub template: Box<[FormatArgsPiece]>,
     pub arguments: FormatArguments,
+    pub orphans: Vec<ExprId>,
 }
 
 #[derive(Debug, Clone, PartialEq, Eq)]
@@ -196,7 +198,11 @@ pub(crate) fn parse(
     let is_source_literal = parser.is_source_literal;
     if !parser.errors.is_empty() {
         // FIXME: Diagnose
-        return FormatArgs { template: Default::default(), arguments: args.finish() };
+        return FormatArgs {
+            template: Default::default(),
+            arguments: args.finish(),
+            orphans: vec![],
+        };
     }
 
     let to_span = |inner_span: parse::InnerSpan| {
@@ -419,7 +425,11 @@ pub(crate) fn parse(
         // FIXME: Diagnose
     }
 
-    FormatArgs { template: template.into_boxed_slice(), arguments: args.finish() }
+    FormatArgs {
+        template: template.into_boxed_slice(),
+        arguments: args.finish(),
+        orphans: unused.into_iter().map(TupleExt::head).collect(),
+    }
 }
 
 #[derive(Debug, Clone, PartialEq, Eq)]
diff --git a/crates/ide-completion/src/tests/expression.rs b/crates/ide-completion/src/tests/expression.rs
index e6969c8db300c..b4f936b35aead 100644
--- a/crates/ide-completion/src/tests/expression.rs
+++ b/crates/ide-completion/src/tests/expression.rs
@@ -1094,3 +1094,157 @@ pub struct UnstableButWeAreOnNightlyAnyway;
         "#]],
     );
 }
+
+#[test]
+fn inside_format_args_completions_work() {
+    check_empty(
+        r#"
+//- minicore: fmt
+struct Foo;
+impl Foo {
+    fn foo(&self) {}
+}
+
+fn main() {
+    format_args!("{}", Foo.$0);
+}
+"#,
+        expect![[r#"
+            me foo()  fn(&self)
+            sn box    Box::new(expr)
+            sn call   function(expr)
+            sn dbg    dbg!(expr)
+            sn dbgr   dbg!(&expr)
+            sn match  match expr {}
+            sn ref    &expr
+            sn refm   &mut expr
+            sn unsafe unsafe {}
+        "#]],
+    );
+    check_empty(
+        r#"
+//- minicore: fmt
+struct Foo;
+impl Foo {
+    fn foo(&self) {}
+}
+
+fn main() {
+    format_args!("{}", Foo.f$0);
+}
+"#,
+        expect![[r#"
+            me foo()  fn(&self)
+            sn box    Box::new(expr)
+            sn call   function(expr)
+            sn dbg    dbg!(expr)
+            sn dbgr   dbg!(&expr)
+            sn match  match expr {}
+            sn ref    &expr
+            sn refm   &mut expr
+            sn unsafe unsafe {}
+        "#]],
+    );
+}
+
+#[test]
+fn inside_faulty_format_args_completions_work() {
+    check_empty(
+        r#"
+//- minicore: fmt
+struct Foo;
+impl Foo {
+    fn foo(&self) {}
+}
+
+fn main() {
+    format_args!("", Foo.$0);
+}
+"#,
+        expect![[r#"
+            me foo()  fn(&self)
+            sn box    Box::new(expr)
+            sn call   function(expr)
+            sn dbg    dbg!(expr)
+            sn dbgr   dbg!(&expr)
+            sn match  match expr {}
+            sn ref    &expr
+            sn refm   &mut expr
+            sn unsafe unsafe {}
+        "#]],
+    );
+    check_empty(
+        r#"
+//- minicore: fmt
+struct Foo;
+impl Foo {
+    fn foo(&self) {}
+}
+
+fn main() {
+    format_args!("", Foo.f$0);
+}
+"#,
+        expect![[r#"
+            me foo()  fn(&self)
+            sn box    Box::new(expr)
+            sn call   function(expr)
+            sn dbg    dbg!(expr)
+            sn dbgr   dbg!(&expr)
+            sn match  match expr {}
+            sn ref    &expr
+            sn refm   &mut expr
+            sn unsafe unsafe {}
+        "#]],
+    );
+    check_empty(
+        r#"
+//- minicore: fmt
+struct Foo;
+impl Foo {
+    fn foo(&self) {}
+}
+
+fn main() {
+    format_args!("{} {named} {captured} {named} {}", a, named = c, Foo.f$0);
+}
+"#,
+        expect![[r#"
+            me foo()  fn(&self)
+            sn box    Box::new(expr)
+            sn call   function(expr)
+            sn dbg    dbg!(expr)
+            sn dbgr   dbg!(&expr)
+            sn match  match expr {}
+            sn ref    &expr
+            sn refm   &mut expr
+            sn unsafe unsafe {}
+        "#]],
+    );
+    check_empty(
+        r#"
+//- minicore: fmt
+struct Foo;
+impl Foo {
+    fn foo(&self) {}
+}
+
+fn main() {
+    format_args!("{", Foo.f$0);
+}
+"#,
+        expect![[r#"
+            sn box    Box::new(expr)
+            sn call   function(expr)
+            sn dbg    dbg!(expr)
+            sn dbgr   dbg!(&expr)
+            sn if     if expr {}
+            sn match  match expr {}
+            sn not    !expr
+            sn ref    &expr
+            sn refm   &mut expr
+            sn unsafe unsafe {}
+            sn while  while expr {}
+        "#]],
+    );
+}
diff --git a/crates/ide/src/view_hir.rs b/crates/ide/src/view_hir.rs
index d2bbbf6d26ab4..738b370a068e0 100644
--- a/crates/ide/src/view_hir.rs
+++ b/crates/ide/src/view_hir.rs
@@ -1,7 +1,7 @@
 use hir::{DefWithBody, Semantics};
 use ide_db::base_db::FilePosition;
 use ide_db::RootDatabase;
-use syntax::{algo::find_node_at_offset, ast, AstNode};
+use syntax::{algo::ancestors_at_offset, ast, AstNode};
 
 // Feature: View Hir
 //
@@ -19,7 +19,9 @@ fn body_hir(db: &RootDatabase, position: FilePosition) -> Option<String> {
     let sema = Semantics::new(db);
     let source_file = sema.parse(position.file_id);
 
-    let item = find_node_at_offset::<ast::Item>(source_file.syntax(), position.offset)?;
+    let item = ancestors_at_offset(source_file.syntax(), position.offset)
+        .filter(|it| ast::MacroCall::can_cast(it.kind()))
+        .find_map(ast::Item::cast)?;
     let def: DefWithBody = match item {
         ast::Item::Fn(it) => sema.to_def(&it)?.into(),
         ast::Item::Const(it) => sema.to_def(&it)?.into(),
diff --git a/crates/ide/src/view_mir.rs b/crates/ide/src/view_mir.rs
index a36aba58bc0ed..52ed420669a7a 100644
--- a/crates/ide/src/view_mir.rs
+++ b/crates/ide/src/view_mir.rs
@@ -1,7 +1,7 @@
 use hir::{DefWithBody, Semantics};
 use ide_db::base_db::FilePosition;
 use ide_db::RootDatabase;
-use syntax::{algo::find_node_at_offset, ast, AstNode};
+use syntax::{algo::ancestors_at_offset, ast, AstNode};
 
 // Feature: View Mir
 //
@@ -18,7 +18,9 @@ fn body_mir(db: &RootDatabase, position: FilePosition) -> Option<String> {
     let sema = Semantics::new(db);
     let source_file = sema.parse(position.file_id);
 
-    let item = find_node_at_offset::<ast::Item>(source_file.syntax(), position.offset)?;
+    let item = ancestors_at_offset(source_file.syntax(), position.offset)
+        .filter(|it| ast::MacroCall::can_cast(it.kind()))
+        .find_map(ast::Item::cast)?;
     let def: DefWithBody = match item {
         ast::Item::Fn(it) => sema.to_def(&it)?.into(),
         ast::Item::Const(it) => sema.to_def(&it)?.into(),
diff --git a/crates/rust-analyzer/tests/slow-tests/tidy.rs b/crates/rust-analyzer/tests/slow-tests/tidy.rs
index 45adbf5c573be..dba336ea7d6dc 100644
--- a/crates/rust-analyzer/tests/slow-tests/tidy.rs
+++ b/crates/rust-analyzer/tests/slow-tests/tidy.rs
@@ -250,6 +250,7 @@ fn check_dbg(path: &Path, text: &str) {
         // We have .dbg postfix
         "ide-completion/src/completions/postfix.rs",
         "ide-completion/src/completions/keyword.rs",
+        "ide-completion/src/tests/expression.rs",
         "ide-completion/src/tests/proc_macros.rs",
         // The documentation in string literals may contain anything for its own purposes
         "ide-completion/src/lib.rs",

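The substance of this fix: when the template fails to parse (or has fewer placeholders than arguments), the leftover arguments are now recorded as `orphans` and lowered into the desugared unsafe block as discarded statements, so type inference still visits them and completion keeps working inside the macro call. A minimal stand-alone sketch of the shape the lowering now produces; the names are illustrative and the real `UnsafeArg::new()` tail is elided:

    #![allow(unused_unsafe)]

    struct Foo;
    impl Foo {
        fn foo(&self) {}
    }

    fn main() {
        // Conceptually, `format_args!("", Foo.foo())` now keeps its
        // orphan argument inside the unsafe block as a plain statement
        // (`Statement::Expr { expr, has_semi: true }`), instead of
        // dropping it from the expression tree entirely.
        let _fmt = unsafe {
            Foo.foo(); // orphan expression, still type-checked
        };
    }

This is exactly the scenario the new `inside_faulty_format_args_completions_work` test exercises with `format_args!("", Foo.$0)`.
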
From 9337519df5f35f7e093650592e380f301a63bce5 Mon Sep 17 00:00:00 2001
From: hkalbasi 
Date: Sat, 9 Dec 2023 23:32:51 +0330
Subject: [PATCH 76/80] Replace `doc_comments_and_attrs` with `collect_attrs`

---
 crates/hir/src/lib.rs                             | 14 ++++++--------
 .../src/handlers/unresolved_extern_crate.rs       | 15 +++++++++++++++
 2 files changed, 21 insertions(+), 8 deletions(-)

diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs
index ca838c7a51e10..eff98b21d2c8d 100644
--- a/crates/hir/src/lib.rs
+++ b/crates/hir/src/lib.rs
@@ -59,7 +59,7 @@ use hir_def::{
     Lookup, MacroExpander, MacroId, ModuleId, StaticId, StructId, TraitAliasId, TraitId,
     TypeAliasId, TypeOrConstParamId, TypeParamId, UnionId,
 };
-use hir_expand::{name::name, MacroCallKind};
+use hir_expand::{attrs::collect_attrs, name::name, MacroCallKind};
 use hir_ty::{
     all_super_traits, autoderef, check_orphan_rules,
     consteval::{try_const_usize, unknown_const_as_generic, ConstEvalError, ConstExt},
@@ -81,7 +81,7 @@ use once_cell::unsync::Lazy;
 use rustc_hash::FxHashSet;
 use stdx::{impl_from, never};
 use syntax::{
-    ast::{self, HasAttrs as _, HasDocComments, HasName},
+    ast::{self, HasAttrs as _, HasName},
     AstNode, AstPtr, SmolStr, SyntaxNode, SyntaxNodePtr, TextRange, T,
 };
 use triomphe::Arc;
@@ -974,10 +974,9 @@ fn precise_macro_call_location(
             // Compute the precise location of the macro name's token in the derive
             // list.
             let token = (|| {
-                let derive_attr = node
-                    .doc_comments_and_attrs()
+                let derive_attr = collect_attrs(&node)
                     .nth(derive_attr_index.ast_index())
-                    .and_then(Either::left)?;
+                    .and_then(|x| Either::left(x.1))?;
                 let token_tree = derive_attr.meta()?.token_tree()?;
                 let group_by = token_tree
                     .syntax()
@@ -1002,10 +1001,9 @@ fn precise_macro_call_location(
         }
         MacroCallKind::Attr { ast_id, invoc_attr_index, .. } => {
             let node = ast_id.to_node(db.upcast());
-            let attr = node
-                .doc_comments_and_attrs()
+            let attr = collect_attrs(&node)
                 .nth(invoc_attr_index.ast_index())
-                .and_then(Either::left)
+                .and_then(|x| Either::left(x.1))
                 .unwrap_or_else(|| {
                     panic!("cannot find attribute #{}", invoc_attr_index.ast_index())
                 });
diff --git a/crates/ide-diagnostics/src/handlers/unresolved_extern_crate.rs b/crates/ide-diagnostics/src/handlers/unresolved_extern_crate.rs
index f8265b63275fc..71c501a336b1e 100644
--- a/crates/ide-diagnostics/src/handlers/unresolved_extern_crate.rs
+++ b/crates/ide-diagnostics/src/handlers/unresolved_extern_crate.rs
@@ -44,6 +44,21 @@ extern crate core;
 extern crate self as foo;
 struct Foo;
 use foo::Foo as Bar;
+"#,
+        );
+    }
+
+    #[test]
+    fn regression_panic_with_inner_attribute_in_presence_of_unresolved_crate() {
+        check_diagnostics(
+            r#"
+//- /lib.rs
+  #[macro_use] extern crate doesnotexist;
+//^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: unresolved extern crate
+    mod _test_inner {
+        #![empty_attr]
+      //^^^^^^^^^^^^^^ error: unresolved macro `empty_attr`
+    }
 "#,
         );
     }

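For orientation: `collect_attrs` yields one `(AttrId, Either<ast::Attr, ast::Comment>)` pair per attribute slot, with doc comments occupying slots of their own, so `nth(ast_index)` stays aligned with the index recorded at expansion time, and `|x| Either::left(x.1)` then keeps only real attributes. A self-contained toy model of that lookup, using a hand-rolled stand-in for the `either` crate's type:

    enum Either<L, R> {
        Left(L),
        Right(R),
    }

    impl<L, R> Either<L, R> {
        // Mirrors `either::Either::left`: Some for Left, None for Right.
        fn left(self) -> Option<L> {
            match self {
                Either::Left(l) => Some(l),
                Either::Right(_) => None,
            }
        }
    }

    fn main() {
        // Attribute slots as `collect_attrs` would hand them out: real
        // attributes on the Left, doc comments on the Right, every slot
        // numbered so recorded indices keep pointing at the right one.
        let slots = vec![
            (0usize, Either::Left("#[macro_use]")),
            (1, Either::Right("/// a doc comment still occupies a slot")),
            (2, Either::Left("#![empty_attr]")),
        ];
        // The fixed lookup: `.nth(index).and_then(|x| Either::left(x.1))`.
        let invoc_attr_index = 2;
        let attr = slots.into_iter().nth(invoc_attr_index).and_then(|x| Either::left(x.1));
        assert_eq!(attr, Some("#![empty_attr]"));
    }
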
From 306c907425cc0a5ca8459df7277ab21711529e60 Mon Sep 17 00:00:00 2001
From: Lukas Wirth 
Date: Sun, 10 Dec 2023 14:44:40 +0100
Subject: [PATCH 77/80] fix: Fix view mir, hir and eval function not working
 when cursor is inside macros

---
 crates/ide/src/interpret_function.rs | 14 ++++++++------
 crates/ide/src/view_hir.rs           |  2 +-
 crates/ide/src/view_mir.rs           |  2 +-
 3 files changed, 10 insertions(+), 8 deletions(-)

diff --git a/crates/ide/src/interpret_function.rs b/crates/ide/src/interpret_function.rs
index d06ffd535758b..21697490482ec 100644
--- a/crates/ide/src/interpret_function.rs
+++ b/crates/ide/src/interpret_function.rs
@@ -1,10 +1,10 @@
 use hir::Semantics;
-use ide_db::base_db::SourceDatabaseExt;
-use ide_db::RootDatabase;
-use ide_db::{base_db::FilePosition, LineIndexDatabase};
+use ide_db::{
+    base_db::{FilePosition, SourceDatabaseExt},
+    LineIndexDatabase, RootDatabase,
+};
 use std::{fmt::Write, time::Instant};
-use syntax::TextRange;
-use syntax::{algo::find_node_at_offset, ast, AstNode};
+use syntax::{algo::ancestors_at_offset, ast, AstNode, TextRange};
 
 // Feature: Interpret Function
 //
@@ -28,7 +28,9 @@ fn find_and_interpret(db: &RootDatabase, position: FilePosition) -> Option<String> {
     let sema = Semantics::new(db);
     let source_file = sema.parse(position.file_id);
 
-    let item = find_node_at_offset::<ast::Item>(source_file.syntax(), position.offset)?;
+    let item = ancestors_at_offset(source_file.syntax(), position.offset)
+        .filter(|it| !ast::MacroCall::can_cast(it.kind()))
+        .find_map(ast::Item::cast)?;
     let def = match item {
         ast::Item::Fn(it) => sema.to_def(&it)?,
         _ => return None,
diff --git a/crates/ide/src/view_hir.rs b/crates/ide/src/view_hir.rs
index 738b370a068e0..9abe54cd39036 100644
--- a/crates/ide/src/view_hir.rs
+++ b/crates/ide/src/view_hir.rs
@@ -20,7 +20,7 @@ fn body_hir(db: &RootDatabase, position: FilePosition) -> Option<String> {
     let source_file = sema.parse(position.file_id);
 
     let item = ancestors_at_offset(source_file.syntax(), position.offset)
-        .filter(|it| ast::MacroCall::can_cast(it.kind()))
+        .filter(|it| !ast::MacroCall::can_cast(it.kind()))
         .find_map(ast::Item::cast)?;
     let def: DefWithBody = match item {
         ast::Item::Fn(it) => sema.to_def(&it)?.into(),
diff --git a/crates/ide/src/view_mir.rs b/crates/ide/src/view_mir.rs
index 52ed420669a7a..08d810c134628 100644
--- a/crates/ide/src/view_mir.rs
+++ b/crates/ide/src/view_mir.rs
@@ -19,7 +19,7 @@ fn body_mir(db: &RootDatabase, position: FilePosition) -> Option<String> {
     let source_file = sema.parse(position.file_id);
 
     let item = ancestors_at_offset(source_file.syntax(), position.offset)
-        .filter(|it| ast::MacroCall::can_cast(it.kind()))
+        .filter(|it| !ast::MacroCall::can_cast(it.kind()))
         .find_map(ast::Item::cast)?;
     let def: DefWithBody = match item {
         ast::Item::Fn(it) => sema.to_def(&it)?.into(),

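The one-character fix matters because a macro call is itself castable to `ast::Item`: with the un-negated filter from patch 75, the walk kept *only* `MACRO_CALL` ancestors, so `find_map(ast::Item::cast)` returned the macro call and never reached the enclosing `fn`. A toy model of the two predicates over an ancestor chain, with node kinds as plain strings:

    fn main() {
        // Innermost-first ancestor kinds for a cursor inside
        // `println!(..)` within `fn main`, as `ancestors_at_offset`
        // would yield them.
        let ancestors = ["MACRO_CALL", "FN", "SOURCE_FILE"];
        // Buggy: keep only macro calls, so the macro call is the first
        // (and only) candidate and the enclosing `fn` is unreachable.
        let buggy = ancestors.iter().copied().find(|k| *k == "MACRO_CALL");
        assert_eq!(buggy, Some("MACRO_CALL"));
        // Fixed: skip macro calls, so the enclosing `fn` is found.
        let fixed = ancestors.iter().copied().find(|k| *k != "MACRO_CALL");
        assert_eq!(fixed, Some("FN"));
    }
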
From 1630477985097fcc9457dbd5f11b75ddb52bfe5b Mon Sep 17 00:00:00 2001
From: Maybe Waffle 
Date: Wed, 6 Dec 2023 22:11:16 +0000
Subject: [PATCH 78/80] fix: Don't emit "missing items" diagnostic for negative
 impls

---
 crates/hir/src/lib.rs                              |  3 ++-
 .../src/handlers/trait_impl_missing_assoc_item.rs  | 14 ++++++++++++++
 2 files changed, 16 insertions(+), 1 deletion(-)

diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs
index 4a0c384e8a305..a5281ec32bb2f 100644
--- a/crates/hir/src/lib.rs
+++ b/crates/hir/src/lib.rs
@@ -671,7 +671,8 @@ impl Module {
                 _ => (),
             };
 
-            if let Some(trait_) = trait_ {
+            // Negative impls can't have items, don't emit missing items diagnostic for them
+            if let (false, Some(trait_)) = (impl_is_negative, trait_) {
                 let items = &db.trait_data(trait_.into()).items;
                 let required_items = items.iter().filter(|&(_, assoc)| match *assoc {
                     AssocItemId::FunctionId(it) => !db.function_data(it).has_body(),
diff --git a/crates/ide-diagnostics/src/handlers/trait_impl_missing_assoc_item.rs b/crates/ide-diagnostics/src/handlers/trait_impl_missing_assoc_item.rs
index 51923797ac91a..56188cddf0b2e 100644
--- a/crates/ide-diagnostics/src/handlers/trait_impl_missing_assoc_item.rs
+++ b/crates/ide-diagnostics/src/handlers/trait_impl_missing_assoc_item.rs
@@ -112,4 +112,18 @@ impl Trait for () {
 "#,
         );
     }
+
+    #[test]
+    fn negative_impl() {
+        check_diagnostics(
+            r#"
+trait Trait {
+    fn item();
+}
+
+// Negative impls don't require any items (in fact, they forbid providing any)
+impl !Trait for () {}
+"#,
+        )
+    }
 }

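Background for the test: a negative impl asserts that a type will never implement the trait, so it cannot carry associated items, and requiring them would be nonsensical. Negative impls are nightly-only; a minimal illustration:

    // Nightly-only (`negative_impls` feature).
    #![feature(negative_impls)]
    #![allow(dead_code)]

    trait Trait {
        fn item();
    }

    struct Unit;

    // No `fn item()` here; providing one would itself be an error,
    // which is why the missing-items diagnostic must skip this impl.
    impl !Trait for Unit {}

    fn main() {}
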
From c47cb47adf094277c270eb890c9326b3c2c6c42b Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Lauren=C8=9Biu=20Nicola?= 
Date: Mon, 11 Dec 2023 11:55:43 +0200
Subject: [PATCH 79/80] Fix typo in cfg

---
 src/tools/rust-analyzer/crates/hir-expand/src/eager.rs | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/eager.rs b/src/tools/rust-analyzer/crates/hir-expand/src/eager.rs
index ef7200f615ccf..1e2722e846463 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/eager.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/eager.rs
@@ -73,7 +73,7 @@ pub fn expand_eager_macro_input(
         )
     };
     let err = parse_err.or(err);
-    if cfg!(debug) {
+    if cfg!(debug_assertions) {
         arg_map.finish();
     }
 

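The typo is an easy one to make: `debug` is not a built-in cfg, so `cfg!(debug)` is false unless someone explicitly compiles with `--cfg debug`, whereas `cfg!(debug_assertions)` is set automatically for debug profiles. A quick demonstration:

    fn main() {
        // Always false in practice: nothing passes `--cfg debug`.
        println!("cfg!(debug) = {}", cfg!(debug));
        // True in a default debug build, false with `--release`
        // (and togglable via `-C debug-assertions`).
        println!("cfg!(debug_assertions) = {}", cfg!(debug_assertions));
    }

On toolchains where `--check-cfg` is enabled, the unknown `debug` cfg would additionally raise an unexpected-cfg warning, which is the same mechanism the next patch accommodates.
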
From ce8d5fba859fc02146b27d03e7ea670c155ce88b Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Lauren=C8=9Biu=20Nicola?= 
Date: Mon, 11 Dec 2023 12:23:43 +0200
Subject: [PATCH 80/80] Add rust_analyzer to EXTRA_CHECK_CFGS

---
 src/bootstrap/src/lib.rs | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/src/bootstrap/src/lib.rs b/src/bootstrap/src/lib.rs
index fd0b61eae8d7c..8d6598c916a25 100644
--- a/src/bootstrap/src/lib.rs
+++ b/src/bootstrap/src/lib.rs
@@ -77,6 +77,8 @@ const EXTRA_CHECK_CFGS: &[(Option<Mode>, &str, Option<&[&'static str]>)] = &[
     (None, "bootstrap", None),
     (Some(Mode::Rustc), "parallel_compiler", None),
     (Some(Mode::ToolRustc), "parallel_compiler", None),
+    (Some(Mode::ToolRustc), "rust_analyzer", None),
+    (Some(Mode::ToolStd), "rust_analyzer", None),
     (Some(Mode::Codegen), "parallel_compiler", None),
     (Some(Mode::Std), "stdarch_intel_sde", None),
     (Some(Mode::Std), "no_fp_fmt_parse", None),
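Context for the two new entries: bootstrap builds in-tree tools with `--check-cfg`, so any custom cfg that a tool's sources mention must be declared expected here, or the build warns. rust-analyzer enables a `rust_analyzer` cfg when it analyzes sources, letting code special-case the IDE's view; a sketch of that downstream pattern, where the cfg name is real but the functions are purely illustrative:

    #![allow(dead_code)]

    // Seen only under rust-analyzer, which analyzes with the
    // `rust_analyzer` cfg enabled.
    #[cfg(rust_analyzer)]
    fn ide_only_view() {}

    // Seen by a regular rustc build, where the cfg is absent.
    #[cfg(not(rust_analyzer))]
    fn normal_build() {}

    fn main() {}

Outside bootstrap, the equivalent declaration on current Cargo is a build-script line such as `println!("cargo::rustc-check-cfg=cfg(rust_analyzer)");`; in-tree, the `EXTRA_CHECK_CFGS` table above plays that role for `Mode::ToolRustc` and `Mode::ToolStd` builds.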