
Optimize TokenMap updates and traversals for modified files only #6464

Merged: 9 commits, Aug 29, 2024
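
This PR stops the language server from rebuilding the entire `TokenMap` on every compile: when the client reports file versions through `LspConfig`, the server evicts and re-traverses tokens only for the file that changed. Below is a minimal sketch of the modified-file detection used in `session::traverse`; the `LspConfig` stand-in and its `file_versions` type are assumptions for illustration, not the real sway-lsp definitions.

```rust
use std::{collections::BTreeMap, path::PathBuf};

// Hypothetical stand-in for sway-lsp's LspConfig; the real field type may differ.
struct LspConfig {
    // Client-reported document versions: `Some(v)` marks a file the editor changed.
    file_versions: BTreeMap<PathBuf, Option<u64>>,
}

// Mirrors the diff below: the first path with a reported version is the modified file.
fn modified_file(lsp_mode: Option<&LspConfig>) -> Option<PathBuf> {
    lsp_mode.and_then(|mode| {
        mode.file_versions
            .iter()
            .find_map(|(path, version)| version.map(|_| path.clone()))
    })
}

fn main() {
    let mut file_versions = BTreeMap::new();
    file_versions.insert(PathBuf::from("src/lib.sw"), None);
    file_versions.insert(PathBuf::from("src/main.sw"), Some(2));
    let config = LspConfig { file_versions };
    // Only src/main.sw carries a version, so it is the one file re-traversed.
    assert_eq!(
        modified_file(Some(&config)),
        Some(PathBuf::from("src/main.sw"))
    );
}
```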
17 changes: 12 additions & 5 deletions sway-lsp/benches/lsp_benchmarks/compile.rs
@@ -29,7 +29,7 @@ fn benchmarks(c: &mut Criterion) {
b.iter(|| {
let engines = Engines::default();
let _ = black_box(
-                    session::compile(&build_plan, &engines, None, lsp_mode.clone(), experimental)
+                    session::compile(&build_plan, &engines, None, lsp_mode.as_ref(), experimental)
.unwrap(),
);
})
@@ -38,12 +38,19 @@ fn benchmarks(c: &mut Criterion) {
c.bench_function("traverse", |b| {
let engines = Engines::default();
let results = black_box(
-            session::compile(&build_plan, &engines, None, lsp_mode.clone(), experimental).unwrap(),
+            session::compile(&build_plan, &engines, None, lsp_mode.as_ref(), experimental).unwrap(),
);
let session = Arc::new(session::Session::new());
b.iter(|| {
-            let _ =
-                black_box(session::traverse(results.clone(), &engines, session.clone()).unwrap());
+            let _ = black_box(
+                session::traverse(
+                    results.clone(),
+                    &engines,
+                    session.clone(),
+                    lsp_mode.as_ref(),
+                )
+                .unwrap(),
+            );
})
});

@@ -53,7 +60,7 @@ fn benchmarks(c: &mut Criterion) {
b.iter(|| {
for _ in 0..NUM_DID_CHANGE_ITERATIONS {
let _ = black_box(
-                    session::compile(&build_plan, &engines, None, lsp_mode.clone(), experimental)
+                    session::compile(&build_plan, &engines, None, lsp_mode.as_ref(), experimental)
.unwrap(),
);
}
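
The signature change from `Option<LspConfig>` to `Option<&LspConfig>` lets each benchmark iteration borrow the config instead of cloning it; only `pkg::check`, which needs ownership, pays for a clone via `.cloned()`. A small sketch of that `as_ref`/`cloned` round-trip, using `String` as a stand-in for `LspConfig`:

```rust
fn main() {
    let lsp_mode: Option<String> = Some("lsp config".to_string());

    // `as_ref` converts &Option<T> to Option<&T>: the value is borrowed,
    // so repeated calls (e.g. inside b.iter) never copy the config.
    let borrowed: Option<&String> = lsp_mode.as_ref();

    // `cloned` turns Option<&T> back into Option<T> at the single call
    // site that actually needs an owned value.
    let owned: Option<String> = borrowed.cloned();
    assert_eq!(owned, lsp_mode);
}
```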
124 changes: 107 additions & 17 deletions sway-lsp/src/core/session.rs
@@ -12,7 +12,8 @@ use crate::{
},
error::{DirectoryError, DocumentError, LanguageServerError},
traverse::{
-        dependency, lexed_tree, parsed_tree::ParsedTree, typed_tree::TypedTree, ParseContext,
+        dependency, lexed_tree::LexedTree, parsed_tree::ParsedTree, typed_tree::TypedTree,
+        ParseContext,
},
};
use dashmap::DashMap;
@@ -31,6 +32,7 @@ use std::{
sync::{atomic::AtomicBool, Arc},
time::SystemTime,
};
+use sway_ast::{attribute::Annotated, ItemKind};
use sway_core::{
decl_engine::DeclEngine,
language::{
@@ -275,15 +277,15 @@ pub fn compile(
build_plan: &BuildPlan,
engines: &Engines,
retrigger_compilation: Option<Arc<AtomicBool>>,
-    lsp_mode: Option<LspConfig>,
+    lsp_mode: Option<&LspConfig>,
experimental: sway_core::ExperimentalFlags,
) -> Result<Vec<(Option<Programs>, Handler)>, LanguageServerError> {
let _p = tracing::trace_span!("compile").entered();
pkg::check(
build_plan,
BuildTarget::default(),
true,
-        lsp_mode,
+        lsp_mode.cloned(),
true,
engines,
retrigger_compilation,
@@ -298,9 +300,20 @@ pub fn traverse(
results: Vec<(Option<Programs>, Handler)>,
engines_clone: &Engines,
session: Arc<Session>,
+    lsp_mode: Option<&LspConfig>,
) -> Result<Option<CompileResults>, LanguageServerError> {
let _p = tracing::trace_span!("traverse").entered();
-    session.token_map.clear();
+    let modified_file = lsp_mode.and_then(|mode| {
+        mode.file_versions
+            .iter()
+            .find_map(|(path, version)| version.map(|_| path.clone()))
+    });
+    if let Some(path) = &modified_file {
+        session.token_map.remove_tokens_for_file(path);
+    } else {
+        session.token_map.clear();
+    }

session.metrics.clear();
let mut diagnostics: CompileResults = (Vec::default(), Vec::default());
let results_len = results.len();
@@ -337,6 +350,14 @@ pub fn traverse(
let path = engines.se().get_path(source_id);
let program_id = program_id_from_path(&path, engines)?;
session.metrics.insert(program_id, metrics);

+            if let Some(modified_file) = &modified_file {
+                let modified_program_id = program_id_from_path(modified_file, engines)?;
+                // We can skip traversing the programs for this iteration as they are unchanged.
+                if program_id != modified_program_id {
+                    continue;
+                }
+            }
}

// Get a reference to the typed program AST.
@@ -356,17 +377,23 @@ pub fn traverse(
// The final element in the results is the main program.
if i == results_len - 1 {
// First, populate our token_map with sway keywords.
-            lexed_tree::parse(&lexed, &ctx);
+            let lexed_tree = LexedTree::new(&ctx);
+            lexed_tree.collect_module_kinds(&lexed);
+            parse_lexed_program(&lexed, &ctx, &modified_file, |an, _ctx| {
+                lexed_tree.traverse_node(an)
+            });

// Next, populate our token_map with un-typed yet parsed ast nodes.
let parsed_tree = ParsedTree::new(&ctx);
parsed_tree.collect_module_spans(&parsed);
-            parse_ast_to_tokens(&parsed, &ctx, |an, _ctx| parsed_tree.traverse_node(an));
+            parse_ast_to_tokens(&parsed, &ctx, &modified_file, |an, _ctx| {
+                parsed_tree.traverse_node(an)
+            });

// Finally, populate our token_map with typed ast nodes.
let typed_tree = TypedTree::new(&ctx);
typed_tree.collect_module_spans(typed_program);
-            parse_ast_to_typed_tokens(typed_program, &ctx, |node, _ctx| {
+            parse_ast_to_typed_tokens(typed_program, &ctx, &modified_file, |node, _ctx| {
typed_tree.traverse_node(node);
});

@@ -376,11 +403,11 @@ pub fn traverse(
compiled_program.typed = Some(typed_program.clone());
} else {
// Collect tokens from dependencies and the standard library prelude.
-            parse_ast_to_tokens(&parsed, &ctx, |an, ctx| {
+            parse_ast_to_tokens(&parsed, &ctx, &modified_file, |an, ctx| {
dependency::collect_parsed_declaration(an, ctx);
});

-            parse_ast_to_typed_tokens(typed_program, &ctx, |node, ctx| {
+            parse_ast_to_typed_tokens(typed_program, &ctx, &modified_file, |node, ctx| {
dependency::collect_typed_declaration(node, ctx);
});
}
@@ -406,7 +433,7 @@ pub fn parse_project(
&build_plan,
engines,
retrigger_compilation,
-        lsp_mode.clone(),
+        lsp_mode.as_ref(),
experimental,
)?;

Expand All @@ -417,7 +444,7 @@ pub fn parse_project(
return Err(LanguageServerError::ProgramsIsNone);
}

-    let diagnostics = traverse(results, engines, session.clone())?;
+    let diagnostics = traverse(results, engines, session.clone(), lsp_mode.as_ref())?;
if let Some(config) = &lsp_mode {
// Only write the diagnostics results on didSave or didOpen.
if !config.optimized_build {
@@ -435,13 +462,60 @@ pub fn parse_project(
Ok(())
}

+/// Parse the [LexedProgram] to populate the [TokenMap] with lexed nodes.
+pub fn parse_lexed_program(
+    lexed_program: &LexedProgram,
+    ctx: &ParseContext,
+    modified_file: &Option<PathBuf>,
+    f: impl Fn(&Annotated<ItemKind>, &ParseContext) + Sync,
+) {
+    let should_process = |item: &&Annotated<ItemKind>| {
+        modified_file
+            .as_ref()
+            .map(|path| {
+                item.span()
+                    .source_id()
+                    .map_or(false, |id| ctx.engines.se().get_path(id) == *path)
+            })
+            .unwrap_or(true)
+    };
+
+    lexed_program
+        .root
+        .tree
+        .items
+        .iter()
+        .chain(
+            lexed_program
+                .root
+                .submodules_recursive()
+                .flat_map(|(_, submodule)| &submodule.module.tree.items),
+        )
+        .filter(should_process)
+        .collect::<Vec<_>>()
+        .par_iter()
+        .for_each(|item| f(item, ctx));
+}

/// Parse the [ParseProgram] AST to populate the [TokenMap] with parsed AST nodes.
fn parse_ast_to_tokens(
parse_program: &ParseProgram,
ctx: &ParseContext,
+    modified_file: &Option<PathBuf>,
f: impl Fn(&AstNode, &ParseContext) + Sync,
) {
-    let nodes = parse_program
+    let should_process = |node: &&AstNode| {
+        modified_file
+            .as_ref()
+            .map(|path| {
+                node.span
+                    .source_id()
+                    .map_or(false, |id| ctx.engines.se().get_path(id) == *path)
+            })
+            .unwrap_or(true)
+    };
+
+    parse_program
.root
.tree
.root_nodes
@@ -452,17 +526,31 @@ fn parse_ast_to_tokens(
.submodules_recursive()
.flat_map(|(_, submodule)| &submodule.module.tree.root_nodes),
)
-        .collect::<Vec<_>>();
-    nodes.par_iter().for_each(|n| f(n, ctx));
+        .filter(should_process)
+        .collect::<Vec<_>>()
+        .par_iter()
+        .for_each(|n| f(n, ctx));
}

/// Parse the [ty::TyProgram] AST to populate the [TokenMap] with typed AST nodes.
fn parse_ast_to_typed_tokens(
typed_program: &ty::TyProgram,
ctx: &ParseContext,
+    modified_file: &Option<PathBuf>,
f: impl Fn(&ty::TyAstNode, &ParseContext) + Sync,
) {
-    let nodes = typed_program
+    let should_process = |node: &&ty::TyAstNode| {
+        modified_file
+            .as_ref()
+            .map(|path| {
+                node.span
+                    .source_id()
+                    .map_or(false, |id| ctx.engines.se().get_path(id) == *path)
+            })
+            .unwrap_or(true)
+    };
+
+    typed_program
.root
.all_nodes
.iter()
@@ -472,8 +560,10 @@ fn parse_ast_to_typed_tokens(
.submodules_recursive()
.flat_map(|(_, submodule)| &submodule.module.all_nodes),
)
-        .collect::<Vec<_>>();
-    nodes.par_iter().for_each(|n| f(n, ctx));
+        .filter(should_process)
+        .collect::<Vec<_>>()
+        .par_iter()
+        .for_each(|n| f(n, ctx));
}

/// Create runnables if the `TyProgramKind` of the `TyProgram` is a script.
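
All three traversal helpers above share the same filtering idea: with no modified file (non-LSP builds), every node is processed; otherwise a node is traversed only when its span resolves to the modified path. A self-contained sketch of that predicate, with a hypothetical `Node` type standing in for the real AST nodes and span machinery:

```rust
use std::path::PathBuf;

// Hypothetical stand-in for an AST node; the real nodes resolve a path
// through span.source_id() and the source engine.
struct Node {
    source_path: Option<PathBuf>,
}

// No modified file => full traversal; otherwise only nodes from that file pass.
fn should_process(node: &Node, modified_file: Option<&PathBuf>) -> bool {
    modified_file
        .map(|path| node.source_path.as_ref().map_or(false, |p| p == path))
        .unwrap_or(true)
}

fn main() {
    let node = Node {
        source_path: Some(PathBuf::from("src/main.sw")),
    };
    assert!(should_process(&node, None)); // non-LSP compile: keep everything
    assert!(should_process(&node, Some(&PathBuf::from("src/main.sw"))));
    assert!(!should_process(&node, Some(&PathBuf::from("src/lib.sw"))));
}
```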
11 changes: 10 additions & 1 deletion sway-lsp/src/core/token_map.rs
@@ -8,7 +8,7 @@ use dashmap::{
DashMap,
};
use lsp_types::{Position, Url};
-use std::{thread, time::Duration};
+use std::{path::PathBuf, thread, time::Duration};
use sway_core::{engine_threading::SpannedWithEngines, language::ty, type_system::TypeId, Engines};
use sway_types::Ident;

@@ -235,6 +235,15 @@ impl<'a> TokenMap {
_ => None,
})
}

+    /// Remove all tokens for the given file from the token map.
+    pub fn remove_tokens_for_file(&self, path_to_remove: &PathBuf) {
+        self.0.retain(|key, _value| {
+            key.path
+                .as_ref()
+                .map_or(true, |path| path != path_to_remove)
+        });
+    }
}

impl std::ops::Deref for TokenMap {
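
`remove_tokens_for_file` relies on `DashMap::retain`, which drops every entry whose predicate returns false while locking one shard at a time. A runnable sketch of the same eviction pattern, using a simplified `(Option<PathBuf>, u32)` key in place of sway-lsp's real token identifier:

```rust
use dashmap::DashMap;
use std::path::PathBuf;

fn main() {
    // Key: (optional source path, token index) - a simplified token identifier.
    let map: DashMap<(Option<PathBuf>, u32), &str> = DashMap::new();
    map.insert((Some(PathBuf::from("a.sw")), 0), "token_a");
    map.insert((Some(PathBuf::from("b.sw")), 0), "token_b");
    map.insert((None, 0), "token_without_path");

    let path_to_remove = PathBuf::from("a.sw");
    // Keep entries with no path or a different path; evict the rest.
    map.retain(|(path, _), _| {
        path.as_ref().map_or(true, |p| *p != path_to_remove)
    });
    assert_eq!(map.len(), 2);
}
```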
39 changes: 23 additions & 16 deletions sway-lsp/src/traverse/lexed_tree.rs
@@ -4,8 +4,8 @@ use crate::{
};
use rayon::iter::{ParallelBridge, ParallelIterator};
use sway_ast::{
-    assignable::ElementAccess, expr::LoopControlFlow, ty::TyTupleDescriptor, Assignable,
-    CodeBlockContents, ConfigurableField, Expr, ExprArrayDescriptor, ExprStructField,
+    assignable::ElementAccess, attribute::Annotated, expr::LoopControlFlow, ty::TyTupleDescriptor,
+    Assignable, CodeBlockContents, ConfigurableField, Expr, ExprArrayDescriptor, ExprStructField,
ExprTupleDescriptor, FnArg, FnArgs, FnSignature, IfCondition, IfExpr, ItemAbi,
ItemConfigurable, ItemConst, ItemEnum, ItemFn, ItemImpl, ItemImplItem, ItemKind, ItemStorage,
ItemStruct, ItemTrait, ItemTypeAlias, ItemUse, MatchBranchKind, ModuleKind, Pattern,
@@ -15,21 +15,28 @@ use sway_ast::{
use sway_core::language::{lexed::LexedProgram, HasSubmodules};
use sway_types::{Ident, Span, Spanned};

-pub fn parse(lexed_program: &LexedProgram, ctx: &ParseContext) {
-    insert_module_kind(ctx, &lexed_program.root.tree.kind);
-    adaptive_iter(&lexed_program.root.tree.items, |item| {
-        item.value.parse(ctx);
-    });
-
-    lexed_program
-        .root
-        .submodules_recursive()
-        .for_each(|(_, dep)| {
-            insert_module_kind(ctx, &dep.module.tree.kind);
-            adaptive_iter(&dep.module.tree.items, |item| {
-                item.value.parse(ctx);
+pub struct LexedTree<'a> {
+    ctx: &'a ParseContext<'a>,
+}
+
+impl<'a> LexedTree<'a> {
+    pub fn new(ctx: &'a ParseContext<'a>) -> Self {
+        Self { ctx }
+    }
+
+    pub fn traverse_node(&self, node: &Annotated<ItemKind>) {
+        node.value.parse(self.ctx);
+    }
+
+    pub fn collect_module_kinds(&self, lexed_program: &LexedProgram) {
+        insert_module_kind(self.ctx, &lexed_program.root.tree.kind);
+        lexed_program
+            .root
+            .submodules_recursive()
+            .for_each(|(_, dep)| {
+                insert_module_kind(self.ctx, &dep.module.tree.kind);
            });
-        });
+    }
}

fn insert_module_kind(ctx: &ParseContext, kind: &ModuleKind) {
Expand Down
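
The lexed traversal now mirrors the `ParsedTree`/`TypedTree` shape: `collect_module_kinds` records each module's kind up front, while per-item work flows through `traverse_node`, which is what lets `parse_lexed_program` apply the same modified-file filter to lexed items as the parsed and typed passes.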