feat: Edition aware parser #17620

Merged · 7 commits · Jul 19, 2024
6 changes: 5 additions & 1 deletion crates/base-db/src/input.rs
@@ -12,7 +12,7 @@ use cfg::CfgOptions;
use intern::Symbol;
use la_arena::{Arena, Idx, RawIdx};
use rustc_hash::{FxHashMap, FxHashSet};
use span::Edition;
use span::{Edition, EditionedFileId};
use triomphe::Arc;
use vfs::{file_set::FileSet, AbsPathBuf, AnchoredPath, FileId, VfsPath};

@@ -662,6 +662,10 @@ impl CrateData {
fn add_dep(&mut self, dep: Dependency) {
self.dependencies.push(dep)
}

pub fn root_file_id(&self) -> EditionedFileId {
EditionedFileId::new(self.root_file_id, self.edition)
}
}

impl Extend<(String, String)> for Env {
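
For orientation, the change above makes a crate's root file carry its edition. A minimal sketch of the idea behind such an id, assuming a plain pair of values — the real span::EditionedFileId packs the edition into spare bits of the file id, so treat the names and layout below as illustrative only:

    // Illustrative stand-in, not the span crate's actual representation.
    #[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
    pub struct FileId(u32);

    #[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
    pub enum Edition { Edition2015, Edition2018, Edition2021, Edition2024 }

    #[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
    pub struct EditionedFileId { file_id: FileId, edition: Edition }

    impl EditionedFileId {
        pub fn new(file_id: FileId, edition: Edition) -> Self {
            Self { file_id, edition }
        }
        // Convenience used by the tests: tag a file with the "current" edition
        // (assumed to be 2021 for this sketch).
        pub fn current_edition(file_id: FileId) -> Self {
            Self::new(file_id, Edition::Edition2021)
        }
        pub fn unpack(self) -> (FileId, Edition) {
            (self.file_id, self.edition)
        }
        pub fn file_id(self) -> FileId {
            self.file_id
        }
        pub fn edition(self) -> Edition {
            self.edition
        }
    }
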
17 changes: 9 additions & 8 deletions crates/base-db/src/lib.rs
@@ -6,8 +6,10 @@ mod input;
use std::panic;

use salsa::Durability;
use span::EditionedFileId;
use syntax::{ast, Parse, SourceFile, SyntaxError};
use triomphe::Arc;
use vfs::FileId;

pub use crate::{
change::FileChange,
@@ -18,8 +20,7 @@ pub use crate::{
},
};
pub use salsa::{self, Cancelled};
pub use span::{FilePosition, FileRange};
pub use vfs::{file_set::FileSet, AnchoredPath, AnchoredPathBuf, FileId, VfsPath};
pub use vfs::{file_set::FileSet, AnchoredPath, AnchoredPathBuf, VfsPath};

pub use semver::{BuildMetadata, Prerelease, Version, VersionReq};

@@ -58,10 +59,10 @@ pub trait FileLoader {
#[salsa::query_group(SourceDatabaseStorage)]
pub trait SourceDatabase: FileLoader + std::fmt::Debug {
/// Parses the file into the syntax tree.
fn parse(&self, file_id: FileId) -> Parse<ast::SourceFile>;
fn parse(&self, file_id: EditionedFileId) -> Parse<ast::SourceFile>;

/// Returns the set of errors obtained from parsing the file including validation errors.
fn parse_errors(&self, file_id: FileId) -> Option<Arc<[SyntaxError]>>;
fn parse_errors(&self, file_id: EditionedFileId) -> Option<Arc<[SyntaxError]>>;

/// The crate graph.
#[salsa::input]
@@ -82,14 +83,14 @@ fn toolchain_channel(db: &dyn SourceDatabase, krate: CrateId) -> Option<ReleaseC
db.toolchain(krate).as_ref().and_then(|v| ReleaseChannel::from_str(&v.pre))
}

fn parse(db: &dyn SourceDatabase, file_id: FileId) -> Parse<ast::SourceFile> {
fn parse(db: &dyn SourceDatabase, file_id: EditionedFileId) -> Parse<ast::SourceFile> {
let _p = tracing::info_span!("parse", ?file_id).entered();
let (file_id, edition) = file_id.unpack();
let text = db.file_text(file_id);
// FIXME: Edition based parsing
SourceFile::parse(&text, span::Edition::CURRENT)
SourceFile::parse(&text, edition)
}

fn parse_errors(db: &dyn SourceDatabase, file_id: FileId) -> Option<Arc<[SyntaxError]>> {
fn parse_errors(db: &dyn SourceDatabase, file_id: EditionedFileId) -> Option<Arc<[SyntaxError]>> {
let errors = db.parse(file_id).errors();
match &*errors {
[] => None,
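
A sketch of how a caller sees the new query surface, assuming only the items imported in the hunks above; the parse query unpacks the edition itself before handing the text to the parser, so call sites no longer pick one:

    // Hypothetical helper, not part of the diff: parse a crate's root file with
    // the edition recorded for it in the crate graph.
    fn parse_crate_root(db: &dyn SourceDatabase, krate: CrateId) -> Parse<ast::SourceFile> {
        let root: EditionedFileId = db.crate_graph()[krate].root_file_id();
        // `parse` unpacks (FileId, Edition) internally and forwards the edition
        // to `SourceFile::parse`.
        db.parse(root)
    }
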
7 changes: 5 additions & 2 deletions crates/hir-def/src/attr.rs
@@ -657,19 +657,22 @@ mod tests {
//! Currently, it tests `#[doc(hidden)]` and `#[doc(alias)]`.

use intern::Symbol;
use span::EditionedFileId;
use triomphe::Arc;

use base_db::FileId;
use hir_expand::span_map::{RealSpanMap, SpanMap};
use mbe::{syntax_node_to_token_tree, DocCommentDesugarMode};
use span::FileId;
use syntax::{ast, AstNode, TextRange};

use crate::attr::{DocAtom, DocExpr};

fn assert_parse_result(input: &str, expected: DocExpr) {
let source_file = ast::SourceFile::parse(input, span::Edition::CURRENT).ok().unwrap();
let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
let map = SpanMap::RealSpanMap(Arc::new(RealSpanMap::absolute(FileId::from_raw(0))));
let map = SpanMap::RealSpanMap(Arc::new(RealSpanMap::absolute(
EditionedFileId::current_edition(FileId::from_raw(0)),
)));
let tt = syntax_node_to_token_tree(
tt.syntax(),
map.as_ref(),
7 changes: 4 additions & 3 deletions crates/hir-def/src/body/scope.rs
@@ -288,8 +288,9 @@ fn compute_expr_scopes(

#[cfg(test)]
mod tests {
use base_db::{FileId, SourceDatabase};
use base_db::SourceDatabase;
use hir_expand::{name::AsName, InFile};
use span::FileId;
use syntax::{algo::find_node_at_offset, ast, AstNode};
use test_fixture::WithFixture;
use test_utils::{assert_eq_text, extract_offset};
@@ -325,7 +326,7 @@ mod tests {

let file_syntax = db.parse(file_id).syntax_node();
let marker: ast::PathExpr = find_node_at_offset(&file_syntax, offset).unwrap();
let function = find_function(&db, file_id);
let function = find_function(&db, file_id.file_id());

let scopes = db.expr_scopes(function.into());
let (_body, source_map) = db.body_with_source_map(function.into());
@@ -480,7 +481,7 @@ fn foo() {
.expect("failed to find a name at the target offset");
let name_ref: ast::NameRef = find_node_at_offset(file.syntax(), offset).unwrap();

let function = find_function(&db, file_id);
let function = find_function(&db, file_id.file_id());

let scopes = db.expr_scopes(function.into());
let (body, source_map) = db.body_with_source_map(function.into());
13 changes: 8 additions & 5 deletions crates/hir-def/src/db.rs
@@ -1,10 +1,10 @@
//! Defines database & queries for name resolution.
use base_db::{salsa, CrateId, FileId, SourceDatabase, Upcast};
use base_db::{salsa, CrateId, SourceDatabase, Upcast};
use either::Either;
use hir_expand::{db::ExpandDatabase, HirFileId, MacroDefId};
use intern::{sym, Interned};
use la_arena::ArenaMap;
use span::MacroCallId;
use span::{EditionedFileId, MacroCallId};
use syntax::{ast, AstPtr};
use triomphe::Arc;

@@ -239,11 +239,14 @@ pub trait DefDatabase: InternDatabase + ExpandDatabase + Upcast<dyn ExpandDataba

fn crate_supports_no_std(&self, crate_id: CrateId) -> bool;

fn include_macro_invoc(&self, crate_id: CrateId) -> Vec<(MacroCallId, FileId)>;
fn include_macro_invoc(&self, crate_id: CrateId) -> Vec<(MacroCallId, EditionedFileId)>;
}

// return: macro call id and include file id
fn include_macro_invoc(db: &dyn DefDatabase, krate: CrateId) -> Vec<(MacroCallId, FileId)> {
fn include_macro_invoc(
db: &dyn DefDatabase,
krate: CrateId,
) -> Vec<(MacroCallId, EditionedFileId)> {
db.crate_def_map(krate)
.modules
.values()
@@ -257,7 +260,7 @@ fn include_macro_invoc(db: &dyn DefDatabase, krate: CrateId) -> Vec<(MacroCallId
}

fn crate_supports_no_std(db: &dyn DefDatabase, crate_id: CrateId) -> bool {
let file = db.crate_graph()[crate_id].root_file_id;
let file = db.crate_graph()[crate_id].root_file_id();
let item_tree = db.file_item_tree(file.into());
let attrs = item_tree.raw_attrs(AttrOwner::TopLevel);
for attr in &**attrs {
56 changes: 56 additions & 0 deletions crates/hir-def/src/macro_expansion_tests/mbe.rs
@@ -1921,3 +1921,59 @@ fn f() {
"#]],
);
}

#[test]
fn test_edition_handling_out() {
check(
r#"
//- /main.rs crate:main deps:old edition:2021
macro_rules! r#try {
($it:expr) => {
$it?
};
}
fn f() {
old::invoke_bare_try!(0);
}
//- /old.rs crate:old edition:2015
#[macro_export]
macro_rules! invoke_bare_try {
($it:expr) => {
try!($it)
};
}
"#,
expect![[r#"
macro_rules! r#try {
($it:expr) => {
$it?
};
}
fn f() {
try!(0);
}
"#]],
);
}

#[test]
fn test_edition_handling_in() {
check(
r#"
//- /main.rs crate:main deps:old edition:2021
fn f() {
old::parse_try_old!(try!{});
}
//- /old.rs crate:old edition:2015
#[macro_export]
macro_rules! parse_try_old {
($it:expr) => {};
}
"#,
expect![[r#"
fn f() {
;
}
"#]],
);
}
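
Both fixtures revolve around `try` because it is the clearest edition split the parser must respect: in edition 2015 `try` is an ordinary identifier, so `try!(...)` is just a macro call, while from 2018 onward it is a reserved keyword and the macro has to be spelled with a raw identifier. The first test checks that a macro defined in a 2015 crate may still expand to a bare `try!` call when used from a 2021 crate; the second exercises how tokens written in the 2021 crate are handled when fed to a 2015 macro. A standalone illustration of the keyword split, separate from the fixtures:

    // On a 2015 crate the macro below could simply be named and invoked as `try`;
    // on 2018+ editions the raw-identifier spelling `r#try` is required because
    // `try` is a reserved keyword there.
    macro_rules! r#try {
        ($it:expr) => {
            $it?
        };
    }

    fn first_plus_one(xs: &[i32]) -> Option<i32> {
        let first = r#try!(xs.first().copied());
        Some(first + 1)
    }
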
18 changes: 10 additions & 8 deletions crates/hir-def/src/nameres.rs
@@ -59,15 +59,15 @@ mod tests;

use std::ops::Deref;

use base_db::{CrateId, FileId};
use base_db::CrateId;
use hir_expand::{
name::Name, proc_macro::ProcMacroKind, ErasedAstId, HirFileId, InFile, MacroCallId, MacroDefId,
};
use intern::Symbol;
use itertools::Itertools;
use la_arena::Arena;
use rustc_hash::{FxHashMap, FxHashSet};
use span::{Edition, FileAstId, ROOT_ERASED_FILE_AST_ID};
use span::{Edition, EditionedFileId, FileAstId, FileId, ROOT_ERASED_FILE_AST_ID};
use stdx::format_to;
use syntax::{ast, SmolStr};
use triomphe::Arc;
@@ -244,14 +244,14 @@ impl std::ops::Index<LocalModuleId> for DefMap {
#[derive(Debug, PartialEq, Eq, Clone, Copy, Hash)]
pub enum ModuleOrigin {
CrateRoot {
definition: FileId,
definition: EditionedFileId,
},
/// Note that non-inline modules, by definition, live inside a non-macro file.
File {
is_mod_rs: bool,
declaration: FileAstId<ast::Module>,
declaration_tree_id: ItemTreeId<Mod>,
definition: FileId,
definition: EditionedFileId,
},
Inline {
definition_tree_id: ItemTreeId<Mod>,
@@ -277,7 +277,7 @@ impl ModuleOrigin {
}
}

pub fn file_id(&self) -> Option<FileId> {
pub fn file_id(&self) -> Option<EditionedFileId> {
match self {
ModuleOrigin::File { definition, .. } | ModuleOrigin::CrateRoot { definition } => {
Some(*definition)
@@ -339,7 +339,7 @@ impl DefMap {
let _p = tracing::info_span!("crate_def_map_query", ?name).entered();

let module_data = ModuleData::new(
ModuleOrigin::CrateRoot { definition: krate.root_file_id },
ModuleOrigin::CrateRoot { definition: krate.root_file_id() },
Visibility::Public,
);

Expand All @@ -350,7 +350,7 @@ impl DefMap {
None,
);
let def_map =
collector::collect_defs(db, def_map, TreeId::new(krate.root_file_id.into(), None));
collector::collect_defs(db, def_map, TreeId::new(krate.root_file_id().into(), None));

Arc::new(def_map)
}
@@ -433,7 +433,9 @@ impl DefMap {
pub fn modules_for_file(&self, file_id: FileId) -> impl Iterator<Item = LocalModuleId> + '_ {
self.modules
.iter()
.filter(move |(_id, data)| data.origin.file_id() == Some(file_id))
.filter(move |(_id, data)| {
data.origin.file_id().map(EditionedFileId::file_id) == Some(file_id)
})
.map(|(id, _data)| id)
}

8 changes: 4 additions & 4 deletions crates/hir-def/src/nameres/collector.rs
@@ -5,7 +5,7 @@

use std::{cmp::Ordering, iter, mem, ops::Not};

use base_db::{CrateId, CrateOrigin, Dependency, FileId, LangCrateOrigin};
use base_db::{CrateId, CrateOrigin, Dependency, LangCrateOrigin};
use cfg::{CfgExpr, CfgOptions};
use either::Either;
use hir_expand::{
@@ -22,7 +22,7 @@ use itertools::{izip, Itertools};
use la_arena::Idx;
use limit::Limit;
use rustc_hash::{FxHashMap, FxHashSet};
use span::{Edition, ErasedFileAstId, FileAstId, SyntaxContextId};
use span::{Edition, EditionedFileId, ErasedFileAstId, FileAstId, SyntaxContextId};
use syntax::ast;
use triomphe::Arc;

@@ -272,7 +272,7 @@ impl DefCollector<'_> {
let _p = tracing::info_span!("seed_with_top_level").entered();

let crate_graph = self.db.crate_graph();
let file_id = crate_graph[self.def_map.krate].root_file_id;
let file_id = crate_graph[self.def_map.krate].root_file_id();
let item_tree = self.db.file_item_tree(file_id.into());
let attrs = item_tree.top_level_attrs(self.db, self.def_map.krate);
let crate_data = Arc::get_mut(&mut self.def_map.data).unwrap();
Expand Down Expand Up @@ -2003,7 +2003,7 @@ impl ModCollector<'_, '_> {
&mut self,
name: Name,
declaration: FileAstId<ast::Module>,
definition: Option<(FileId, bool)>,
definition: Option<(EditionedFileId, bool)>,
visibility: &crate::visibility::RawVisibility,
mod_tree_id: FileItemTreeId<Mod>,
) -> LocalModuleId {
14 changes: 10 additions & 4 deletions crates/hir-def/src/nameres/mod_resolution.rs
@@ -1,8 +1,9 @@
//! This module resolves `mod foo;` declaration to file.
use arrayvec::ArrayVec;
use base_db::{AnchoredPath, FileId};
use base_db::AnchoredPath;
use hir_expand::{name::Name, HirFileIdExt, MacroFileIdExt};
use limit::Limit;
use span::EditionedFileId;
use syntax::ToSmolStr as _;

use crate::{db::DefDatabase, HirFileId};
@@ -64,7 +65,7 @@ impl ModDir {
file_id: HirFileId,
name: &Name,
attr_path: Option<&str>,
) -> Result<(FileId, bool, ModDir), Box<[String]>> {
) -> Result<(EditionedFileId, bool, ModDir), Box<[String]>> {
let name = name.unescaped();

let mut candidate_files = ArrayVec::<_, 2>::new();
@@ -92,7 +93,7 @@ impl ModDir {

let orig_file_id = file_id.original_file_respecting_includes(db.upcast());
for candidate in candidate_files.iter() {
let path = AnchoredPath { anchor: orig_file_id, path: candidate.as_str() };
let path = AnchoredPath { anchor: orig_file_id.file_id(), path: candidate.as_str() };
if let Some(file_id) = db.resolve_path(path) {
let is_mod_rs = candidate.ends_with("/mod.rs");

@@ -103,7 +104,12 @@ impl ModDir {
DirPath::new(format!("{}/", name.display(db.upcast())))
};
if let Some(mod_dir) = self.child(dir_path, !root_dir_owner) {
return Ok((file_id, is_mod_rs, mod_dir));
return Ok((
// FIXME: Edition, is this right?
EditionedFileId::new(file_id, orig_file_id.edition()),
is_mod_rs,
mod_dir,
));
}
}
}
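
The FIXME in the hunk above leans on one assumption worth spelling out: a `mod foo;` declaration and the file it resolves to always belong to the same crate, and editions are per-crate, so giving the child file the anchor's edition should be sound. A hypothetical helper (not in the diff) that captures just that step:

    // Resolving `mod foo;` yields a plain vfs FileId; re-tag it with the edition
    // of the file that contained the declaration, since both are in one crate.
    fn editioned_child(anchor: EditionedFileId, resolved: FileId) -> EditionedFileId {
        EditionedFileId::new(resolved, anchor.edition())
    }
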
4 changes: 2 additions & 2 deletions crates/hir-def/src/nameres/tests/incremental.rs
@@ -16,7 +16,7 @@ fn check_def_map_is_not_recomputed(ra_fixture_initial: &str, ra_fixture_change:
});
assert!(format!("{events:?}").contains("crate_def_map"), "{events:#?}")
}
db.set_file_text(pos.file_id, ra_fixture_change);
db.set_file_text(pos.file_id.file_id(), ra_fixture_change);

{
let events = db.log_executed(|| {
@@ -266,7 +266,7 @@ fn quux() { 92 }
m!(Y);
m!(Z);
"#;
db.set_file_text(pos.file_id, new_text);
db.set_file_text(pos.file_id.file_id(), new_text);

{
let events = db.log_executed(|| {