feat: faster LSP by caching file managers (#6047)
# Description

## Problem

LSP is still relatively slow, and there are many optimization
opportunities we can take advantage of.

## Summary

This is just the second commit of #6045
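
The gist of the change: keep one `FileManager` per workspace root and the type-checking artifacts per package root inside `LspState`, and reuse them on later requests instead of rebuilding everything. Below is a minimal, self-contained sketch of that caching pattern — simplified stand-in types and illustrative method names, not the real `fm`/`noirc_frontend` APIs:

```rust
use std::collections::HashMap;
use std::path::PathBuf;

// Simplified stand-ins for the real cached data.
#[derive(Default)]
struct FileManager; // would hold the workspace's source files
#[derive(Default)]
struct PackageCacheData; // would hold the interner, def maps and crate graph

#[derive(Default)]
struct LspState {
    // Keyed by workspace root: the file manager is reused across requests.
    workspace_cache: HashMap<PathBuf, FileManager>,
    // Keyed by package root: type-checking results are reused across requests.
    package_cache: HashMap<PathBuf, PackageCacheData>,
}

impl LspState {
    fn process_request(&mut self, workspace_root: &PathBuf, package_root: &PathBuf) {
        if !self.workspace_cache.contains_key(workspace_root)
            || !self.package_cache.contains_key(package_root)
        {
            // Cache miss (first request, or the caches were cleared): read the
            // files and type-check the package, then remember the results.
            self.workspace_cache.insert(workspace_root.clone(), FileManager::default());
            self.package_cache.insert(package_root.clone(), PackageCacheData::default());
        }
        // Cache hit: answer from the cached data, no re-parsing or re-checking.
        let _file_manager = &self.workspace_cache[workspace_root];
        let _package_data = &self.package_cache[package_root];
        // ... resolve hover / go-to-definition / references from here ...
    }

    // Mirrors the behaviour on `did_close`: once no documents remain open,
    // both caches are dropped.
    fn on_all_documents_closed(&mut self) {
        self.package_cache.clear();
        self.workspace_cache.clear();
    }
}

fn main() {
    let mut state = LspState::default();
    let (ws, pkg) = (PathBuf::from("/ws"), PathBuf::from("/ws/pkg"));
    state.process_request(&ws, &pkg); // slow path: builds and caches
    state.process_request(&ws, &pkg); // fast path: served from the caches
    state.on_all_documents_closed();
}
```

In the actual diff, the per-package data (`PackageCacheData`) carries the `crate_id`, `crate_graph`, `node_interner` and `def_maps`, and request handlers such as references/rename read from `package_cache` instead of the old `cached_definitions` map.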

## Additional Context

## Documentation

Check one:
- [x] No documentation needed.
- [ ] Documentation included in this PR.
- [ ] **[For Experimental Features]** Documentation to be submitted in a
separate PR.

# PR Checklist

- [x] I have tested the changes locally.
- [x] I have formatted the changes with [Prettier](https://prettier.io/)
and/or `cargo fmt` on default settings.
asterite authored Sep 16, 2024
1 parent 1550278 commit c48a4f8
Showing 5 changed files with 108 additions and 25 deletions.
tooling/lsp/src/lib.rs (17 additions, 7 deletions)
@@ -35,7 +35,7 @@ use nargo::{
use nargo_toml::{find_file_manifest, resolve_workspace_from_toml, PackageSelection};
use noirc_driver::{file_manager_with_stdlib, prepare_crate, NOIR_ARTIFACT_VERSION_STRING};
use noirc_frontend::{
- graph::{CrateId, CrateName},
+ graph::{CrateGraph, CrateId, CrateName},
hir::{
def_map::{parse_file, CrateDefMap},
Context, FunctionNameMatch, ParsedFiles,
@@ -92,15 +92,26 @@ pub struct LspState {
open_documents_count: usize,
input_files: HashMap<String, String>,
cached_lenses: HashMap<String, Vec<CodeLens>>,
- cached_definitions: HashMap<PathBuf, NodeInterner>,
cached_parsed_files: HashMap<PathBuf, (usize, (ParsedModule, Vec<ParserError>))>,
- cached_def_maps: HashMap<PathBuf, BTreeMap<CrateId, CrateDefMap>>,
+ workspace_cache: HashMap<PathBuf, WorkspaceCacheData>,
+ package_cache: HashMap<PathBuf, PackageCacheData>,
options: LspInitializationOptions,

// Tracks files that currently have errors, by package root.
files_with_errors: HashMap<PathBuf, HashSet<Url>>,
}

+ struct WorkspaceCacheData {
+     file_manager: FileManager,
+ }
+
+ struct PackageCacheData {
+     crate_id: CrateId,
+     crate_graph: CrateGraph,
+     node_interner: NodeInterner,
+     def_maps: BTreeMap<CrateId, CrateDefMap>,
+ }

impl LspState {
fn new(
client: &ClientSocket,
@@ -112,12 +123,11 @@ impl LspState {
solver: WrapperSolver(Box::new(solver)),
input_files: HashMap::new(),
cached_lenses: HashMap::new(),
- cached_definitions: HashMap::new(),
- open_documents_count: 0,
cached_parsed_files: HashMap::new(),
- cached_def_maps: HashMap::new(),
+ workspace_cache: HashMap::new(),
+ package_cache: HashMap::new(),
+ open_documents_count: 0,
options: Default::default(),

files_with_errors: HashMap::new(),
}
}
tooling/lsp/src/notifications/mod.rs (19 additions, 4 deletions)
@@ -2,7 +2,9 @@ use std::collections::HashSet;
use std::ops::ControlFlow;
use std::path::PathBuf;

- use crate::insert_all_files_for_workspace_into_file_manager;
+ use crate::{
+     insert_all_files_for_workspace_into_file_manager, PackageCacheData, WorkspaceCacheData,
+ };
use async_lsp::{ErrorCode, LanguageClient, ResponseError};
use fm::{FileManager, FileMap};
use fxhash::FxHashMap as HashMap;
@@ -79,7 +81,8 @@ pub(super) fn on_did_close_text_document(
state.open_documents_count -= 1;

if state.open_documents_count == 0 {
- state.cached_definitions.clear();
+ state.package_cache.clear();
+ state.workspace_cache.clear();
}

let document_uri = params.text_document.uri;
@@ -155,8 +158,15 @@ pub(crate) fn process_workspace_for_noir_document(
Some(&file_path),
);
state.cached_lenses.insert(document_uri.to_string(), collected_lenses);
- state.cached_definitions.insert(package.root_dir.clone(), context.def_interner);
- state.cached_def_maps.insert(package.root_dir.clone(), context.def_maps);
+ state.package_cache.insert(
+     package.root_dir.clone(),
+     PackageCacheData {
+         crate_id,
+         crate_graph: context.crate_graph,
+         node_interner: context.def_interner,
+         def_maps: context.def_maps,
+     },
+ );

let fm = &context.file_manager;
let files = fm.as_file_map();
@@ -166,6 +176,11 @@
}
}

+ state.workspace_cache.insert(
+     workspace.root_dir.clone(),
+     WorkspaceCacheData { file_manager: workspace_file_manager },
+ );

Ok(())
}

tooling/lsp/src/requests/mod.rs (70 additions, 12 deletions)
@@ -2,9 +2,9 @@ use std::collections::BTreeMap;
use std::path::PathBuf;
use std::{collections::HashMap, future::Future};

- use crate::insert_all_files_for_workspace_into_file_manager;
+ use crate::{insert_all_files_for_workspace_into_file_manager, parse_diff, PackageCacheData};
use crate::{
- parse_diff, resolve_workspace_for_source_path,
+ resolve_workspace_for_source_path,
types::{CodeLensOptions, InitializeParams},
};
use async_lsp::{ErrorCode, ResponseError};
@@ -407,7 +407,7 @@ pub(crate) struct ProcessRequestCallbackArgs<'a> {
location: noirc_errors::Location,
files: &'a FileMap,
interner: &'a NodeInterner,
- interners: &'a HashMap<PathBuf, NodeInterner>,
+ package_cache: &'a HashMap<PathBuf, PackageCacheData>,
crate_id: CrateId,
crate_name: String,
dependencies: &'a Vec<Dependency>,
@@ -432,6 +432,63 @@
ResponseError::new(ErrorCode::REQUEST_FAILED, "Could not find package for file")
})?;

+ // In practice when `process_request` is called, a document in the project should already have been
+ // open so both the workspace and package cache will have data. However, just in case this isn't true
+ // for some reason, and also for tests (some tests just test a request without going through the full
+ // LSP workflow), we have a fallback where we type-check the workspace/package, then continue with
+ // processing the request.
+ let Some(workspace_cache_data) = state.workspace_cache.get(&workspace.root_dir) else {
+     return process_request_no_workspace_cache(state, text_document_position_params, callback);
+ };
+
+ let Some(package_cache_data) = state.package_cache.get(&package.root_dir) else {
+     return process_request_no_workspace_cache(state, text_document_position_params, callback);
+ };
+
+ let file_manager = &workspace_cache_data.file_manager;
+ let interner = &package_cache_data.node_interner;
+ let def_maps = &package_cache_data.def_maps;
+ let crate_graph = &package_cache_data.crate_graph;
+ let crate_id = package_cache_data.crate_id;
+
+ let files = file_manager.as_file_map();
+
+ let location = position_to_location(
+     files,
+     &PathString::from(file_path),
+     &text_document_position_params.position,
+ )?;
+
+ Ok(callback(ProcessRequestCallbackArgs {
+     location,
+     files,
+     interner,
+     package_cache: &state.package_cache,
+     crate_id,
+     crate_name: package.name.to_string(),
+     dependencies: &crate_graph[crate_id].dependencies,
+     def_maps,
+ }))
+ }
+
+ pub(crate) fn process_request_no_workspace_cache<F, T>(
+     state: &mut LspState,
+     text_document_position_params: TextDocumentPositionParams,
+     callback: F,
+ ) -> Result<T, ResponseError>
+ where
+     F: FnOnce(ProcessRequestCallbackArgs) -> T,
+ {
+ let file_path =
+     text_document_position_params.text_document.uri.to_file_path().map_err(|_| {
+         ResponseError::new(ErrorCode::REQUEST_FAILED, "URI is not a valid file path")
+     })?;
+
+ let workspace = resolve_workspace_for_source_path(file_path.as_path()).unwrap();
+ let package = crate::workspace_package_for_file(&workspace, &file_path).ok_or_else(|| {
+     ResponseError::new(ErrorCode::REQUEST_FAILED, "Could not find package for file")
+ })?;

let mut workspace_file_manager = workspace.new_file_manager();
insert_all_files_for_workspace_into_file_manager(
state,
@@ -445,17 +502,17 @@

let interner;
let def_maps;
- if let Some(def_interner) = state.cached_definitions.get(&package.root_dir) {
- interner = def_interner;
- def_maps = state.cached_def_maps.get(&package.root_dir).unwrap();
+ if let Some(package_cache) = state.package_cache.get(&package.root_dir) {
+ interner = &package_cache.node_interner;
+ def_maps = &package_cache.def_maps;
} else {
// We ignore the warnings and errors produced by compilation while resolving the definition
let _ = noirc_driver::check_crate(&mut context, crate_id, &Default::default());
interner = &context.def_interner;
def_maps = &context.def_maps;
}

- let files = context.file_manager.as_file_map();
+ let files = workspace_file_manager.as_file_map();

let location = position_to_location(
files,
@@ -467,17 +524,18 @@
location,
files,
interner,
- interners: &state.cached_definitions,
+ package_cache: &state.package_cache,
crate_id,
crate_name: package.name.to_string(),
- dependencies: &context.crate_graph[context.root_crate_id()].dependencies,
+ dependencies: &context.crate_graph[crate_id].dependencies,
def_maps,
}))
}

pub(crate) fn find_all_references_in_workspace(
location: noirc_errors::Location,
interner: &NodeInterner,
- cached_interners: &HashMap<PathBuf, NodeInterner>,
+ package_cache: &HashMap<PathBuf, PackageCacheData>,
files: &FileMap,
include_declaration: bool,
include_self_type_name: bool,
@@ -499,10 +557,10 @@ pub(crate) fn find_all_references_in_workspace(
include_declaration,
include_self_type_name,
);
- for interner in cached_interners.values() {
+ for cache_data in package_cache.values() {
locations.extend(find_all_references(
referenced_location,
- interner,
+ &cache_data.node_interner,
files,
include_declaration,
include_self_type_name,
tooling/lsp/src/requests/references.rs (1 addition, 1 deletion)
@@ -16,7 +16,7 @@ pub(crate) fn on_references_request(
find_all_references_in_workspace(
args.location,
args.interner,
- args.interners,
+ args.package_cache,
args.files,
include_declaration,
true,
tooling/lsp/src/requests/rename.rs (1 addition, 1 deletion)
@@ -38,7 +38,7 @@ pub(crate) fn on_rename_request(
let rename_changes = find_all_references_in_workspace(
args.location,
args.interner,
- args.interners,
+ args.package_cache,
args.files,
true,
false,
