diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index f6580c3e..7a280fde 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -46,6 +46,12 @@ jobs: - name: Lint run: yarn lint + - name: Download foundryup + run: curl -L https://foundry.paradigm.xyz | bash + + - name: Install foundry + run: ~/.foundry/bin/foundryup + - name: Run server tests run: yarn run test:codecov diff --git a/client/.eslintrc.js b/client/.eslintrc.js index a0997984..efabae2a 100644 --- a/client/.eslintrc.js +++ b/client/.eslintrc.js @@ -6,4 +6,13 @@ module.exports = { project: `${__dirname}/tsconfig.json`, sourceType: "module", }, + overrides: [ + { + files: ["**/*.ts"], + rules: { + "@typescript-eslint/no-unused-vars": "warn", + "@typescript-eslint/no-empty-function": "warn", + }, + }, + ], }; diff --git a/client/package.json b/client/package.json index 9de1f5d6..c9a2ec5c 100644 --- a/client/package.json +++ b/client/package.json @@ -22,7 +22,7 @@ "dependencies": { "@sentry/node": "6.19.1", "prettier": "2.5.1", - "prettier-plugin-solidity": "1.0.0-beta.24", + "prettier-plugin-solidity": "1.0.0", "vscode-languageclient": "^7.0.0" } } diff --git a/client/src/extension.ts b/client/src/extension.ts index afbd5126..42095800 100644 --- a/client/src/extension.ts +++ b/client/src/extension.ts @@ -38,6 +38,10 @@ export async function activate(context: ExtensionContext) { showAnalyticsAllowPopup(extensionState); // eslint-disable-next-line @typescript-eslint/no-floating-promises warnOnOtherSolidityExtensions(extensionState); + + return { + isReady: () => extensionState?.indexingFinished, + }; } export function deactivate() { diff --git a/client/src/languageitems/hardhatProject.ts b/client/src/languageitems/hardhatProject.ts deleted file mode 100644 index afa34aa4..00000000 --- a/client/src/languageitems/hardhatProject.ts +++ /dev/null @@ -1,81 +0,0 @@ -import { languages, LanguageStatusSeverity, Uri } from "vscode"; -import { RequestType } from "vscode-languageclient/node"; -import { ExtensionState } from "../types"; -import { ensureFilePrefix } from "../utils/files"; - -interface GetSolFileDetailsParams { - uri: string; -} - -type GetSolFileDetailsResponse = - | { found: false } - | { found: true; hardhat: false } - | { - found: true; - hardhat: true; - configPath: string; - configDisplayPath: string; - }; - -const GetSolFileDetails = new RequestType< - GetSolFileDetailsParams, - GetSolFileDetailsResponse, - void ->("solidity/getSolFileDetails"); - -export async function updateHardhatProjectLanguageItem( - extensionState: ExtensionState, - params: GetSolFileDetailsParams -) { - if (!extensionState.client) { - return; - } - - const response = await extensionState.client.sendRequest(GetSolFileDetails, { - uri: ensureFilePrefix(params.uri), - }); - - if (extensionState.hardhatConfigStatusItem === null) { - const statusItem = languages.createLanguageStatusItem( - "hardhat-config-file", - { - language: "solidity", - } - ); - - extensionState.hardhatConfigStatusItem = statusItem; - } - - if (!response.found || !response.hardhat) { - extensionState.hardhatConfigStatusItem.severity = - LanguageStatusSeverity.Warning; - extensionState.hardhatConfigStatusItem.text = - "No related Hardhat config file found"; - - extensionState.hardhatConfigStatusItem.command = undefined; - - return; - } - - if (response.found && response.hardhat) { - extensionState.hardhatConfigStatusItem.text = response.configDisplayPath; - extensionState.hardhatConfigStatusItem.command = { - title: "Open config file", - command: "vscode.open", - arguments: [Uri.file(response.configPath)], - }; - - return; - } - - return clearHardhatConfigState(extensionState); -} - -export function clearHardhatConfigState(extensionState: ExtensionState): void { - if (extensionState.hardhatConfigStatusItem === null) { - return; - } - - extensionState.hardhatConfigStatusItem.dispose(); - extensionState.hardhatConfigStatusItem = null; -} diff --git a/client/src/popups/setupIndexingHooks.ts b/client/src/popups/setupIndexingHooks.ts deleted file mode 100644 index b960f3db..00000000 --- a/client/src/popups/setupIndexingHooks.ts +++ /dev/null @@ -1,150 +0,0 @@ -import { - workspace, - window, - TextDocument, - languages, - LanguageStatusSeverity, - LanguageStatusItem, -} from "vscode"; -import { LanguageClient } from "vscode-languageclient/node"; -import { updateHardhatProjectLanguageItem } from "../languageitems/hardhatProject"; -import { ExtensionState } from "../types"; - -interface IndexFileData { - jobId: number; - path: string; - current: number; - total: number; -} - -export function setupIndexingHooks( - extensionState: ExtensionState, - client: LanguageClient -): void { - client - .onReady() - .then(() => { - const indexingStartDisposable = client.onNotification( - "custom/indexing-start", - (data: IndexFileData) => { - const indexingStatusItem = setupIndexingLanguageStatusItem( - data.jobId - ); - - extensionState.currentIndexingJobs.push(indexingStatusItem); - } - ); - - const indexDisposable = client.onNotification( - "custom/indexing-file", - async (data: IndexFileData) => { - const jobId = data.jobId.toString(); - const indexingStatusItem = extensionState.currentIndexingJobs.find( - (si) => si.id === jobId - ); - - if (!indexingStatusItem) { - return; - } - - if (indexingStatusItem.detail === undefined) { - indexingStatusItem.detail = `${data.total} files`; - } - - // check to display language status item - if ( - window.activeTextEditor && - window.activeTextEditor.document.uri.path.endsWith(data.path) - ) { - await updateHardhatProjectLanguageItem(extensionState, { - uri: window.activeTextEditor.document.uri.path, - }); - } - - if (data.total !== data.current) { - return; - } - - if (window.activeTextEditor && data.total === 0) { - await updateHardhatProjectLanguageItem(extensionState, { - uri: window.activeTextEditor.document.uri.path, - }); - } - - indexingStatusItem.busy = false; - indexingStatusItem.dispose(); - } - ); - - const workerInitializedDisposable = client.onNotification( - "custom/worker-initialized", - (data: { projectBasePath: string }) => { - // Files that were open on vscode load, will - // have swallowed the `didChange` event as the - // language server wasn't intialized yet. We - // revalidate open editor files after indexing - // to ensure warning and errors appear on startup. - triggerValidationForOpenDoc(client, data.projectBasePath); - } - ); - - extensionState.listenerDisposables.push(indexingStartDisposable); - extensionState.listenerDisposables.push(indexDisposable); - extensionState.listenerDisposables.push(workerInitializedDisposable); - }) - .catch((reason) => extensionState.logger.error(reason)); -} - -function setupIndexingLanguageStatusItem(jobId: number): LanguageStatusItem { - const statusItem = languages.createLanguageStatusItem(jobId.toString(), { - language: "solidity", - }); - - statusItem.severity = LanguageStatusSeverity.Information; - statusItem.name = `Indexing`; - statusItem.text = `Scanning for sol files`; - statusItem.detail = undefined; - statusItem.busy = true; - - return statusItem; -} - -/** - * If the doc is open, trigger a noop change on the server to start validation. - */ -function triggerValidationForOpenDoc( - client: LanguageClient, - projectBasePath: string -) { - workspace.textDocuments.forEach((doc) => { - // Only trigger files that belong to the project whose worker is ready - if (doc.uri.path.includes(projectBasePath)) { - notifyOfNoopChange(client, doc); - } - }); -} - -/** - * Sends a no-op change notification to the server, this allows the - * triggering of validation. - * @param client the language client - * @param textDoc the open text file to trigger validation on - */ -function notifyOfNoopChange(client: LanguageClient, textDoc: TextDocument) { - client.sendNotification("textDocument/didChange", { - textDocument: { - version: textDoc.version, - uri: textDoc.uri.toString(), - }, - contentChanges: [ - { - range: { - start: { line: 0, character: 0 }, - end: { line: 0, character: 0 }, - }, - rangeLength: 1, - text: "", - }, - ], - }); -} diff --git a/client/src/setup/onDidChangeActiveTextEditor.ts b/client/src/setup/onDidChangeActiveTextEditor.ts deleted file mode 100644 index f259d749..00000000 --- a/client/src/setup/onDidChangeActiveTextEditor.ts +++ /dev/null @@ -1,18 +0,0 @@ -import { TextEditor } from "vscode"; -import { ExtensionState } from "../types"; -import { - clearHardhatConfigState, - updateHardhatProjectLanguageItem, -} from "../languageitems/hardhatProject"; - -export function onDidChangeActiveTextEditor(extensionState: ExtensionState) { - return async (e: TextEditor | undefined) => { - if (!e || e.document?.languageId !== "solidity") { - return clearHardhatConfigState(extensionState); - } - - return updateHardhatProjectLanguageItem(extensionState, { - uri: e.document.uri.path, - }); - }; -} diff --git a/client/src/setup/setupExtensionState.ts b/client/src/setup/setupExtensionState.ts index 1ccd14cb..3c4647dc 100644 --- a/client/src/setup/setupExtensionState.ts +++ b/client/src/setup/setupExtensionState.ts @@ -43,13 +43,14 @@ export function setupExtensionState( hardhatTelemetryEnabled: workspace.getConfiguration("hardhat").get("telemetry") ?? false, globalTelemetryEnabled: env.isTelemetryEnabled, - hardhatConfigStatusItem: null, + projectStatusItems: [], telemetry, outputChannel, commandsOutputChannel, logger, hardhatProjects: [], + indexingFinished: false, }; telemetry.init(extensionState); diff --git a/client/src/setup/setupIndexingHooks.ts b/client/src/setup/setupIndexingHooks.ts new file mode 100644 index 00000000..6e7e428e --- /dev/null +++ b/client/src/setup/setupIndexingHooks.ts @@ -0,0 +1,141 @@ +import { + workspace, + TextDocument, + languages, + LanguageStatusSeverity, + LanguageStatusItem, + Uri, +} from "vscode"; +import { LanguageClient } from "vscode-languageclient/node"; +import { ExtensionState, Project } from "../types"; + +const INDEXING_JOB_ID = "indexing"; + +export function setupIndexingHooks( + extensionState: ExtensionState, + client: LanguageClient +): void { + client + .onReady() + .then(() => { + const indexingStartDisposable = client.onNotification( + "custom/indexing-start", + () => { + extensionState.indexingFinished = false; + + const indexingStatusItem = setupIndexingLanguageStatusItem(); + + extensionState.currentIndexingJobs.push(indexingStatusItem); + } + ); + + const indexingEndDisposable = client.onNotification( + "custom/indexing-end", + () => { + extensionState.indexingFinished = true; + + const indexingStatusItem = extensionState.currentIndexingJobs.find( + (statusItem) => statusItem.id === INDEXING_JOB_ID + ); + + indexingStatusItem?.dispose(); + + triggerValidationForOpenDoc(client); + } + ); + + const fileIndexedDisposable = client.onNotification( + "custom/file-indexed", + ({ uri, project }: { uri: string; project: Project }) => { + // Show the project associated to a given contract file as a status item + const statusItem = findOrCreateProjectStatusItem(extensionState, uri); + + statusItem.severity = LanguageStatusSeverity.Information; + statusItem.text = `Project: ${project.frameworkName}`; + if (project.configPath !== undefined) { + statusItem.command = { + title: "Open config file", + command: "vscode.open", + arguments: [Uri.file(project.configPath)], + }; + } + } + ); + + extensionState.listenerDisposables.push(indexingStartDisposable); + extensionState.listenerDisposables.push(indexingEndDisposable); + extensionState.listenerDisposables.push(fileIndexedDisposable); + }) + .catch((reason) => extensionState.logger.error(reason)); +} + +function findOrCreateProjectStatusItem( + extensionState: ExtensionState, + uri: string +) { + const foundStatusItem = extensionState.projectStatusItems.find( + (item) => item.id === `project-${uri}` + ); + + if (foundStatusItem !== undefined) { + return foundStatusItem; + } + + const statusItem = languages.createLanguageStatusItem(`project-${uri}`, { + language: "solidity", + pattern: uri, + }); + + extensionState.projectStatusItems.push(statusItem); + + return statusItem; +} + +function setupIndexingLanguageStatusItem(): LanguageStatusItem { + const statusItem = languages.createLanguageStatusItem(INDEXING_JOB_ID, { + language: "solidity", + }); + + statusItem.severity = LanguageStatusSeverity.Information; + statusItem.name = `Indexing`; + statusItem.text = `Scanning for sol files`; + statusItem.detail = undefined; + statusItem.busy = true; + + return statusItem; +} + +/** + * If the doc is open, trigger a noop change on the server to start validation. + */ +function triggerValidationForOpenDoc(client: LanguageClient) { + workspace.textDocuments.forEach((doc) => { + // Only trigger files that belong to the project whose worker is ready + notifyOfNoopChange(client, doc); + }); +} + +/** + * Sends a no-op change notification to the server, this allows the + * triggering of validation. + * @param client the language client + * @param textDoc the open text file to trigger validation on + */ +function notifyOfNoopChange(client: LanguageClient, textDoc: TextDocument) { + client.sendNotification("textDocument/didChange", { + textDocument: { + version: textDoc.version, + uri: textDoc.uri.toString(), + }, + contentChanges: [ + { + range: { + start: { line: 0, character: 0 }, + end: { line: 0, character: 0 }, + }, + rangeLength: 1, + text: "", + }, + ], + }); +} diff --git a/client/src/setup/setupLanguageServerHooks.ts b/client/src/setup/setupLanguageServerHooks.ts index 1570051e..955b26ff 100644 --- a/client/src/setup/setupLanguageServerHooks.ts +++ b/client/src/setup/setupLanguageServerHooks.ts @@ -1,4 +1,4 @@ -import { workspace, env, window } from "vscode"; +import { workspace, env } from "vscode"; import { LanguageClient, LanguageClientOptions, @@ -6,9 +6,8 @@ import { TransportKind, } from "vscode-languageclient/node"; import { ExtensionState } from "../types"; -import { setupIndexingHooks } from "../popups/setupIndexingHooks"; -import { setupValidationJobHooks } from "../popups/setupValidationJobHooks"; -import { onDidChangeActiveTextEditor } from "./onDidChangeActiveTextEditor"; +import { setupIndexingHooks } from "./setupIndexingHooks"; +import { setupValidationJobHooks } from "./setupValidationJobHooks"; export function setupLanguageServerHooks(extensionState: ExtensionState) { startLanguageServer(extensionState); @@ -46,7 +45,9 @@ const startLanguageServer = (extensionState: ExtensionState): void => { synchronize: { fileEvents: [ workspace.createFileSystemWatcher("**/hardhat.config.{ts,js}"), - workspace.createFileSystemWatcher("**/contracts/**/*.sol"), + workspace.createFileSystemWatcher("**/foundry.toml"), + workspace.createFileSystemWatcher("**/remappings.txt"), + workspace.createFileSystemWatcher("**/*.sol"), ], }, diagnosticCollectionName: "hardhat-language-server", @@ -107,10 +108,6 @@ const startLanguageServer = (extensionState: ExtensionState): void => { extensionState.listenerDisposables.push(telemetryChangeDisposable); extensionState.listenerDisposables.push(hardhatTelemetryChangeDisposable); - window.onDidChangeActiveTextEditor( - onDidChangeActiveTextEditor(extensionState) - ); - client.start(); extensionState.client = client; diff --git a/client/src/popups/setupValidationJobHooks.ts b/client/src/setup/setupValidationJobHooks.ts similarity index 100% rename from client/src/popups/setupValidationJobHooks.ts rename to client/src/setup/setupValidationJobHooks.ts diff --git a/client/src/types.ts b/client/src/types.ts index f5f4291b..f9e20c2e 100644 --- a/client/src/types.ts +++ b/client/src/types.ts @@ -27,6 +27,13 @@ export interface ExtensionState { commandsOutputChannel: OutputChannel; logger: Logger; - hardhatConfigStatusItem: LanguageStatusItem | null; + projectStatusItems: LanguageStatusItem[]; // 1 per indexed contract. shows project info for that contract hardhatProjects: string[]; + + indexingFinished: boolean; +} + +export interface Project { + configPath?: string; + frameworkName: string; } diff --git a/client/yarn.lock b/client/yarn.lock index 4c7dbc16..72fd9403 100644 --- a/client/yarn.lock +++ b/client/yarn.lock @@ -108,10 +108,10 @@ "@sentry/types" "6.19.1" tslib "^1.9.3" -"@solidity-parser/parser@^0.14.3": - version "0.14.3" - resolved "https://registry.yarnpkg.com/@solidity-parser/parser/-/parser-0.14.3.tgz#0d627427b35a40d8521aaa933cc3df7d07bfa36f" - integrity sha512-29g2SZ29HtsqA58pLCtopI1P/cPy5/UAzlcAXO6T/CNJimG6yA8kx4NaseMyJULiC+TEs02Y9/yeHzClqoA0hw== +"@solidity-parser/parser@^0.14.5": + version "0.14.5" + resolved "https://registry.yarnpkg.com/@solidity-parser/parser/-/parser-0.14.5.tgz#87bc3cc7b068e08195c219c91cd8ddff5ef1a804" + integrity sha512-6dKnHZn7fg/iQATVEzqyUOyEidbn05q7YA2mQ9hC0MMXhhV3/JrsxmFSYZAcr7j1yUP700LLhTruvJ3MiQmjJg== dependencies: antlr4ts "^0.5.0-alpha.4" @@ -307,10 +307,10 @@ doctrine@^3.0.0: dependencies: esutils "^2.0.2" -emoji-regex@^10.1.0: - version "10.1.0" - resolved "https://registry.yarnpkg.com/emoji-regex/-/emoji-regex-10.1.0.tgz#d50e383743c0f7a5945c47087295afc112e3cf66" - integrity sha512-xAEnNCT3w2Tg6MA7ly6QqYJvEoY1tm9iIjJ3yMKK9JPlWuRHAMoe5iETwQnx3M9TVbFMfsrBgWKR+IsmswwNjg== +emoji-regex@^10.2.1: + version "10.2.1" + resolved "https://registry.yarnpkg.com/emoji-regex/-/emoji-regex-10.2.1.tgz#a41c330d957191efd3d9dfe6e1e8e1e9ab048b3f" + integrity sha512-97g6QgOk8zlDRdgq1WxwgTMgEWGVAQvB5Fdpgc1MkNy56la5SKP9GsMXKDOdqwn90/41a8yPwIGk1Y6WVbeMQA== emoji-regex@^8.0.0: version "8.0.0" @@ -706,15 +706,15 @@ prelude-ls@^1.2.1: resolved "https://registry.yarnpkg.com/prelude-ls/-/prelude-ls-1.2.1.tgz#debc6489d7a6e6b0e7611888cec880337d316396" integrity sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g== -prettier-plugin-solidity@1.0.0-beta.24: - version "1.0.0-beta.24" - resolved "https://registry.yarnpkg.com/prettier-plugin-solidity/-/prettier-plugin-solidity-1.0.0-beta.24.tgz#67573ca87098c14f7ccff3639ddd8a4cab2a87eb" - integrity sha512-6JlV5BBTWzmDSq4kZ9PTXc3eLOX7DF5HpbqmmaF+kloyUwOZbJ12hIYsUaZh2fVgZdV2t0vWcvY6qhILhlzgqg== +prettier-plugin-solidity@1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/prettier-plugin-solidity/-/prettier-plugin-solidity-1.0.0.tgz#5b23f48cc9c28a1246c6dd89af117234b813f48b" + integrity sha512-gRJCeZ7imbWtNYN2SudjJoPmka5r6jcd2cSTV6FC3pVCtY6LFZbeQQjpKufUEp88hXBAAnkOTOh7TA5xwj9M3A== dependencies: - "@solidity-parser/parser" "^0.14.3" - emoji-regex "^10.1.0" + "@solidity-parser/parser" "^0.14.5" + emoji-regex "^10.2.1" escape-string-regexp "^4.0.0" - semver "^7.3.7" + semver "^7.3.8" solidity-comments-extractor "^0.0.7" string-width "^4.2.3" @@ -762,10 +762,10 @@ semver@^7.2.1, semver@^7.3.4: dependencies: lru-cache "^6.0.0" -semver@^7.3.7: - version "7.3.7" - resolved "https://registry.yarnpkg.com/semver/-/semver-7.3.7.tgz#12c5b649afdbf9049707796e22a4028814ce523f" - integrity sha512-QlYTucUYOews+WeEujDoEGziz4K6c47V/Bd+LjSSYcA94p+DmINdf7ncaUinThfvZyu13lN9OY1XDxt8C0Tw0g== +semver@^7.3.8: + version "7.3.8" + resolved "https://registry.yarnpkg.com/semver/-/semver-7.3.8.tgz#07a78feafb3f7b32347d725e33de7e2a2df67798" + integrity sha512-NB1ctGL5rlHrPJtFDVIVzTyQylMLu9N9VICA6HSFJo8MCGVTMW6gfpicwKmmK/dAjTOrqu5l63JJOpDSrAis3A== dependencies: lru-cache "^6.0.0" diff --git a/package.json b/package.json index e68436f7..f33cdce7 100644 --- a/package.json +++ b/package.json @@ -39,7 +39,8 @@ }, "activationEvents": [ "onLanguage:solidity", - "workspaceContains:**/hardhat.config.ts" + "workspaceContains:**/hardhat.config.{ts,js}", + "workspaceContains:**/foundry.toml" ], "scripts": { "build": "tsc -b ./client/tsconfig.json && tsc -b ./server/tsconfig.build.json && tsc -b", diff --git a/scripts/bundle.js b/scripts/bundle.js index c2d410e3..1c30817f 100755 --- a/scripts/bundle.js +++ b/scripts/bundle.js @@ -54,13 +54,15 @@ async function main() { entryPoints: { "./client/out/extension": "./client/src/extension.ts", "./server/out/index": "./server/src/index.ts", - "./server/out/worker": "./server/src/services/validation/worker.ts", + "./server/out/hardhat.config": "./server/src/hardhat.config.ts", + "./server/out/worker/WorkerProcess": + "./server/src/frameworks/Hardhat/worker/WorkerProcess.ts", }, bundle: true, minifyWhitespace: true, minifyIdentifiers: false, minifySyntax: true, - external: ["vscode", "@nomicfoundation/solidity-analyzer"], + external: ["vscode", "@nomicfoundation/solidity-analyzer", "fsevents"], platform: "node", outdir: ".", logLevel: "info", diff --git a/server/.eslintrc.js b/server/.eslintrc.js index a0997984..702ecb72 100644 --- a/server/.eslintrc.js +++ b/server/.eslintrc.js @@ -6,4 +6,14 @@ module.exports = { project: `${__dirname}/tsconfig.json`, sourceType: "module", }, + overrides: [ + { + files: ["**/*.ts"], + rules: { + "@typescript-eslint/no-unused-vars": "warn", + "@typescript-eslint/no-empty-function": "warn", + "no-empty": "warn", + }, + }, + ], }; diff --git a/server/.nycrc b/server/.nycrc index ddf38e87..696d210c 100644 --- a/server/.nycrc +++ b/server/.nycrc @@ -1,10 +1,10 @@ { "extends": "@istanbuljs/nyc-config-typescript", "check-coverage": true, - "statements": 71, - "branches": 55, - "functions": 66, - "lines": 71, + "statements": 50, + "branches": 40, + "functions": 50, + "lines": 50, "all": true, "include": [ "src/**/*.ts" diff --git a/server/package.json b/server/package.json index de77a1e2..17a961f5 100644 --- a/server/package.json +++ b/server/package.json @@ -37,6 +37,8 @@ "@types/node": "^14.14.37", "@types/qs": "^6.9.7", "@types/sinon": "10.0.6", + "@types/lodash": "^4.14.185", + "@types/semver": "^7.3.12", "@types/uuid": "^8.3.1", "chai": "4.3.4", "codecov": "3.8.3", @@ -50,7 +52,7 @@ "typescript": "4.5.4" }, "dependencies": { - "@nomicfoundation/solidity-analyzer": "0.0.3", + "@nomicfoundation/solidity-analyzer": "0.1.0", "@sentry/node": "6.19.1", "@sentry/tracing": "6.19.1", "@solidity-parser/parser": "^0.14.0", @@ -61,10 +63,12 @@ "got": "^11.8.2", "hardhat": "^2.6.0", "js-yaml": "^4.1.0", + "lodash": "^4.17.21", "module-alias": "^2.2.2", "prettier": "2.5.1", - "prettier-plugin-solidity": "1.0.0-beta.24", + "prettier-plugin-solidity": "1.0.0", "qs": "^6.10.1", + "semver": "^7.3.7", "serialize-error": "8.1.0", "uuid": "^8.3.2", "vscode-languageserver": "^7.0.0", diff --git a/server/src/compilerDiagnostics/conversions/attemptConstrainToContractName.ts b/server/src/compilerDiagnostics/conversions/attemptConstrainToContractName.ts index 2891a155..474cda4f 100644 --- a/server/src/compilerDiagnostics/conversions/attemptConstrainToContractName.ts +++ b/server/src/compilerDiagnostics/conversions/attemptConstrainToContractName.ts @@ -1,10 +1,10 @@ import { TextDocument, Diagnostic } from "@common/types"; -import { HardhatCompilerError } from "../../types"; +import { SolcError } from "../../types"; import { constrainByRegex } from "./constrainByRegex"; export function attemptConstrainToContractName( document: TextDocument, - error: HardhatCompilerError + error: SolcError ): Diagnostic { return constrainByRegex( document, diff --git a/server/src/compilerDiagnostics/conversions/attemptConstrainToFunctionName.ts b/server/src/compilerDiagnostics/conversions/attemptConstrainToFunctionName.ts index b1bc413a..02a62650 100644 --- a/server/src/compilerDiagnostics/conversions/attemptConstrainToFunctionName.ts +++ b/server/src/compilerDiagnostics/conversions/attemptConstrainToFunctionName.ts @@ -1,10 +1,10 @@ import { TextDocument, Diagnostic } from "@common/types"; -import { HardhatCompilerError } from "../../types"; +import { SolcError } from "../../types"; import { constrainByRegex } from "./constrainByRegex"; export function attemptConstrainToFunctionName( document: TextDocument, - error: HardhatCompilerError + error: SolcError ): Diagnostic { return constrainByRegex(document, error, /(?<=function\s+)[^\s]+(?=\s*\()/gm); } diff --git a/server/src/compilerDiagnostics/conversions/constrainByRegex.ts b/server/src/compilerDiagnostics/conversions/constrainByRegex.ts index 2bee15a3..3d965d60 100644 --- a/server/src/compilerDiagnostics/conversions/constrainByRegex.ts +++ b/server/src/compilerDiagnostics/conversions/constrainByRegex.ts @@ -1,10 +1,10 @@ import { TextDocument, Range } from "@common/types"; -import { HardhatCompilerError } from "../../types"; +import { SolcError } from "../../types"; import { passThroughConversion } from "./passThroughConversion"; export function constrainByRegex( document: TextDocument, - error: HardhatCompilerError, + error: SolcError, regex: RegExp ) { if (error.sourceLocation === undefined) { diff --git a/server/src/compilerDiagnostics/conversions/passThroughConversion.ts b/server/src/compilerDiagnostics/conversions/passThroughConversion.ts index a8a34639..4c6c36d2 100644 --- a/server/src/compilerDiagnostics/conversions/passThroughConversion.ts +++ b/server/src/compilerDiagnostics/conversions/passThroughConversion.ts @@ -1,9 +1,9 @@ import { TextDocument, Range, DiagnosticSeverity } from "@common/types"; -import { HardhatCompilerError } from "../../types"; +import { SolcError } from "../../types"; export function passThroughConversion( document: TextDocument, - error: HardhatCompilerError + error: SolcError ) { if (!error.sourceLocation) { throw new Error("No source location"); diff --git a/server/src/compilerDiagnostics/diagnostics/AddLicenseIdentifier.ts b/server/src/compilerDiagnostics/diagnostics/AddLicenseIdentifier.ts index e51a1480..eb7dff46 100644 --- a/server/src/compilerDiagnostics/diagnostics/AddLicenseIdentifier.ts +++ b/server/src/compilerDiagnostics/diagnostics/AddLicenseIdentifier.ts @@ -6,7 +6,7 @@ import { Range, } from "vscode-languageserver/node"; import { CompilerDiagnostic, ResolveActionsContext } from "../types"; -import { HardhatCompilerError, ServerState } from "../../types"; +import { SolcError, ServerState } from "../../types"; import { passThroughConversion } from "../conversions/passThroughConversion"; const LICENSE_STATEMENT = "// SPDX-License-Identifier: $LICENSE"; @@ -29,7 +29,7 @@ export class AddLicenseIdentifier implements CompilerDiagnostic { public fromHardhatCompilerError( document: TextDocument, - error: HardhatCompilerError + error: SolcError ): Diagnostic { return passThroughConversion(document, error); } diff --git a/server/src/compilerDiagnostics/diagnostics/AddMultiOverrideSpecifier.ts b/server/src/compilerDiagnostics/diagnostics/AddMultiOverrideSpecifier.ts index e543aef0..54097208 100644 --- a/server/src/compilerDiagnostics/diagnostics/AddMultiOverrideSpecifier.ts +++ b/server/src/compilerDiagnostics/diagnostics/AddMultiOverrideSpecifier.ts @@ -6,7 +6,7 @@ import { import { TextDocument } from "vscode-languageserver-textdocument"; import { CompilerDiagnostic, ResolveActionsContext } from "../types"; import { attemptConstrainToFunctionName } from "../conversions/attemptConstrainToFunctionName"; -import { HardhatCompilerError, ServerState } from "../../types"; +import { SolcError, ServerState } from "../../types"; import { Multioverride, resolveInsertSpecifierQuickFix, @@ -20,7 +20,7 @@ export class AddMultiOverrideSpecifier implements CompilerDiagnostic { public fromHardhatCompilerError( document: TextDocument, - error: HardhatCompilerError + error: SolcError ): Diagnostic { return attemptConstrainToFunctionName(document, error); } diff --git a/server/src/compilerDiagnostics/diagnostics/AddOverrideSpecifier.ts b/server/src/compilerDiagnostics/diagnostics/AddOverrideSpecifier.ts index f926c399..6dc9eaaa 100644 --- a/server/src/compilerDiagnostics/diagnostics/AddOverrideSpecifier.ts +++ b/server/src/compilerDiagnostics/diagnostics/AddOverrideSpecifier.ts @@ -2,7 +2,7 @@ import { CodeAction, Diagnostic } from "vscode-languageserver/node"; import { TextDocument } from "vscode-languageserver-textdocument"; import { CompilerDiagnostic, ResolveActionsContext } from "../types"; import { attemptConstrainToFunctionName } from "../conversions/attemptConstrainToFunctionName"; -import { HardhatCompilerError, ServerState } from "../../types"; +import { SolcError, ServerState } from "../../types"; import { resolveInsertSpecifierQuickFix } from "./common/resolveInsertSpecifierQuickFix"; export class AddOverrideSpecifier implements CompilerDiagnostic { @@ -11,7 +11,7 @@ export class AddOverrideSpecifier implements CompilerDiagnostic { public fromHardhatCompilerError( document: TextDocument, - error: HardhatCompilerError + error: SolcError ): Diagnostic { return attemptConstrainToFunctionName(document, error); } diff --git a/server/src/compilerDiagnostics/diagnostics/AddVirtualSpecifier.ts b/server/src/compilerDiagnostics/diagnostics/AddVirtualSpecifier.ts index a390541c..d9eea22a 100644 --- a/server/src/compilerDiagnostics/diagnostics/AddVirtualSpecifier.ts +++ b/server/src/compilerDiagnostics/diagnostics/AddVirtualSpecifier.ts @@ -2,7 +2,7 @@ import { CodeAction, Diagnostic } from "vscode-languageserver/node"; import { TextDocument } from "vscode-languageserver-textdocument"; import { CompilerDiagnostic, ResolveActionsContext } from "../types"; import { attemptConstrainToFunctionName } from "../conversions/attemptConstrainToFunctionName"; -import { HardhatCompilerError, ServerState } from "../../types"; +import { SolcError, ServerState } from "../../types"; import { resolveInsertSpecifierQuickFix } from "./common/resolveInsertSpecifierQuickFix"; export class AddVirtualSpecifier implements CompilerDiagnostic { @@ -11,7 +11,7 @@ export class AddVirtualSpecifier implements CompilerDiagnostic { public fromHardhatCompilerError( document: TextDocument, - error: HardhatCompilerError + error: SolcError ): Diagnostic { return attemptConstrainToFunctionName(document, error); } diff --git a/server/src/compilerDiagnostics/diagnostics/ConstrainMutability.ts b/server/src/compilerDiagnostics/diagnostics/ConstrainMutability.ts index 15356517..d527cc50 100644 --- a/server/src/compilerDiagnostics/diagnostics/ConstrainMutability.ts +++ b/server/src/compilerDiagnostics/diagnostics/ConstrainMutability.ts @@ -7,7 +7,7 @@ import { import { TextDocument } from "vscode-languageserver-textdocument"; import { CompilerDiagnostic, ResolveActionsContext } from "../types"; import { attemptConstrainToFunctionName } from "../conversions/attemptConstrainToFunctionName"; -import { HardhatCompilerError, ServerState } from "../../types"; +import { SolcError, ServerState } from "../../types"; import { parseFunctionDefinition, ParseFunctionDefinitionResult, @@ -20,7 +20,7 @@ export class ConstrainMutability implements CompilerDiagnostic { public fromHardhatCompilerError( document: TextDocument, - error: HardhatCompilerError + error: SolcError ): Diagnostic { return attemptConstrainToFunctionName(document, error); } diff --git a/server/src/compilerDiagnostics/diagnostics/ContractCodeSize.ts b/server/src/compilerDiagnostics/diagnostics/ContractCodeSize.ts index 109612d8..fe76562e 100644 --- a/server/src/compilerDiagnostics/diagnostics/ContractCodeSize.ts +++ b/server/src/compilerDiagnostics/diagnostics/ContractCodeSize.ts @@ -2,7 +2,7 @@ import { CodeAction, Diagnostic } from "vscode-languageserver/node"; import { TextDocument } from "vscode-languageserver-textdocument"; import { attemptConstrainToContractName } from "@compilerDiagnostics/conversions/attemptConstrainToContractName"; import { CompilerDiagnostic } from "../types"; -import { HardhatCompilerError } from "../../types"; +import { SolcError } from "../../types"; export class ContractCodeSize implements CompilerDiagnostic { public code = "5574"; @@ -10,7 +10,7 @@ export class ContractCodeSize implements CompilerDiagnostic { public fromHardhatCompilerError( document: TextDocument, - error: HardhatCompilerError + error: SolcError ): Diagnostic { return attemptConstrainToContractName(document, error); } diff --git a/server/src/compilerDiagnostics/diagnostics/MarkContractAbstract.ts b/server/src/compilerDiagnostics/diagnostics/MarkContractAbstract.ts index e1898cb0..000d717a 100644 --- a/server/src/compilerDiagnostics/diagnostics/MarkContractAbstract.ts +++ b/server/src/compilerDiagnostics/diagnostics/MarkContractAbstract.ts @@ -7,7 +7,7 @@ import { import { TextDocument } from "vscode-languageserver-textdocument"; import { attemptConstrainToContractName } from "@compilerDiagnostics/conversions/attemptConstrainToContractName"; import { ResolveActionsContext } from "../types"; -import { HardhatCompilerError, ServerState } from "../../types"; +import { SolcError, ServerState } from "../../types"; import { parseContractDefinition, ParseContractDefinitionResult, @@ -20,7 +20,7 @@ export class MarkContractAbstract { public fromHardhatCompilerError( document: TextDocument, - error: HardhatCompilerError + error: SolcError ): Diagnostic { return attemptConstrainToContractName(document, error); } diff --git a/server/src/compilerDiagnostics/diagnostics/SpecifyCompilerVersion.ts b/server/src/compilerDiagnostics/diagnostics/SpecifyCompilerVersion.ts index dba23fcf..05a8ca34 100644 --- a/server/src/compilerDiagnostics/diagnostics/SpecifyCompilerVersion.ts +++ b/server/src/compilerDiagnostics/diagnostics/SpecifyCompilerVersion.ts @@ -6,7 +6,7 @@ import { Range, } from "vscode-languageserver/node"; import { CompilerDiagnostic, ResolveActionsContext } from "../types"; -import { HardhatCompilerError, ServerState } from "../../types"; +import { SolcError, ServerState } from "../../types"; import { passThroughConversion } from "../conversions/passThroughConversion"; /** @@ -22,7 +22,7 @@ export class SpecifyCompilerVersion implements CompilerDiagnostic { public fromHardhatCompilerError( document: TextDocument, - error: HardhatCompilerError + error: SolcError ): Diagnostic { return passThroughConversion(document, error); } diff --git a/server/src/compilerDiagnostics/diagnostics/SpecifyVisibility.ts b/server/src/compilerDiagnostics/diagnostics/SpecifyVisibility.ts index 6e54f6dd..4e015c8e 100644 --- a/server/src/compilerDiagnostics/diagnostics/SpecifyVisibility.ts +++ b/server/src/compilerDiagnostics/diagnostics/SpecifyVisibility.ts @@ -7,7 +7,7 @@ import { import { TextDocument } from "vscode-languageserver-textdocument"; import { ResolveActionsContext } from "../types"; import { attemptConstrainToFunctionName } from "../conversions/attemptConstrainToFunctionName"; -import { HardhatCompilerError, ServerState } from "../../types"; +import { SolcError, ServerState } from "../../types"; import { parseFunctionDefinition } from "./parsing/parseFunctionDefinition"; import { lookupToken } from "./parsing/lookupToken"; @@ -21,7 +21,7 @@ export class SpecifyVisibility { public fromHardhatCompilerError( document: TextDocument, - error: HardhatCompilerError + error: SolcError ): Diagnostic { return attemptConstrainToFunctionName(document, error); } diff --git a/server/src/compilerDiagnostics/types.ts b/server/src/compilerDiagnostics/types.ts index 8edefe59..7563cd7c 100644 --- a/server/src/compilerDiagnostics/types.ts +++ b/server/src/compilerDiagnostics/types.ts @@ -1,6 +1,6 @@ import { CodeAction, Diagnostic } from "vscode-languageserver/node"; import { TextDocument } from "vscode-languageserver-textdocument"; -import { HardhatCompilerError, ServerState } from "../types"; +import { SolcError, ServerState } from "../types"; export interface ResolveActionsContext { document: TextDocument; @@ -19,6 +19,6 @@ export interface CompilerDiagnostic { fromHardhatCompilerError: ( document: TextDocument, - error: HardhatCompilerError + error: SolcError ) => Diagnostic; } diff --git a/server/src/frameworks/Foundry/FoundryIndexer.ts b/server/src/frameworks/Foundry/FoundryIndexer.ts new file mode 100644 index 00000000..b517c0cd --- /dev/null +++ b/server/src/frameworks/Foundry/FoundryIndexer.ts @@ -0,0 +1,22 @@ +import path from "path"; +import { WorkspaceFolder } from "vscode-languageserver-protocol"; +import { decodeUriAndRemoveFilePrefix } from "../../utils"; +import { ProjectIndexer } from "../base/ProjectIndexer"; +import { FoundryProject } from "./FoundryProject"; + +export class FoundryIndexer extends ProjectIndexer { + public async index(folder: WorkspaceFolder) { + const uri = decodeUriAndRemoveFilePrefix(folder.uri); + const configFiles = await this.fileRetriever.findFiles( + uri, + "**/foundry.toml", + ["**/lib/**"] + ); + + return configFiles.map((configFile) => { + const basePath = path.dirname(configFile); + + return new FoundryProject(this.serverState, basePath, configFile); + }); + } +} diff --git a/server/src/frameworks/Foundry/FoundryProject.ts b/server/src/frameworks/Foundry/FoundryProject.ts new file mode 100644 index 00000000..88326145 --- /dev/null +++ b/server/src/frameworks/Foundry/FoundryProject.ts @@ -0,0 +1,193 @@ +/* eslint-disable @typescript-eslint/no-explicit-any */ +import fs from "fs"; +import _ from "lodash"; +import path from "path"; +import { + CompletionItem, + DidChangeWatchedFilesParams, + Position, +} from "vscode-languageserver-protocol"; +import { OpenDocuments, ServerState } from "../../types"; +import { toUnixStyle } from "../../utils"; +import { directoryContains } from "../../utils/directoryContains"; +import { runCmd, runningOnWindows } from "../../utils/operatingSystem"; +import { CompilationDetails } from "../base/CompilationDetails"; +import { Project } from "../base/Project"; +import { buildBasicCompilation } from "../shared/buildBasicCompilation"; +import { getImportCompletions } from "./getImportCompletions"; +import { Remapping } from "./Remapping"; + +export class FoundryProject extends Project { + public priority = 1; + public sourcesPath!: string; + public testsPath!: string; + public remappings: Remapping[] = []; + public initializeError?: string; + public configSolcVersion?: string; + + constructor( + serverState: ServerState, + basePath: string, + public configPath: string + ) { + super(serverState, basePath); + } + + public id(): string { + return this.configPath; + } + + public frameworkName(): string { + return "Foundry"; + } + + public async initialize(): Promise { + try { + const forgePath = runningOnWindows() + ? "%USERPROFILE%\\.cargo\\bin\\forge" + : "~/.foundry/bin/forge"; + const config = JSON.parse( + await runCmd(`${forgePath} config --json`, this.basePath) + ); + this.sourcesPath = path.join(this.basePath, config.src); + this.testsPath = path.join(this.basePath, config.test); + this.configSolcVersion = config.solc || undefined; // may come as null otherwise + + const rawRemappings = await runCmd( + `${forgePath} remappings`, + this.basePath + ); + this.remappings = this._parseRemappings(rawRemappings); + } catch (error: any) { + this.serverState.logger.error(error.toString()); + + switch (error.code) { + case 127: + this.initializeError = + "Couldn't run `forge`. Please check that your foundry installation is correct."; + break; + case 134: + this.initializeError = + "Running `forge` failed. Please check that your foundry.toml file is correct."; + break; + default: + this.initializeError = `Unexpected error while running \`forge\`: ${error}`; + } + } + + return; + } + + public async fileBelongs(uri: string) { + if (this.initializeError === undefined) { + // Project was initialized correctly, then check contract is inside source or test folders + return [this.sourcesPath, this.testsPath].some((dir) => + directoryContains(dir, uri) + ); + } else { + // Project could not be initialized. Claim all files under base path to avoid them being incorrectly assigned to other projects + return directoryContains(this.basePath, uri); + } + } + + public async resolveImportPath(file: string, importPath: string) { + try { + let transformedPath = importPath; + + if (!importPath.startsWith(".")) { + for (const { from, to } of this.remappings) { + if (importPath.startsWith(from)) { + transformedPath = path.join(to, importPath.slice(from.length)); + } + } + } + const resolvedPath = require.resolve(transformedPath, { + paths: [fs.realpathSync(path.dirname(file))], + }); + + return toUnixStyle(fs.realpathSync(resolvedPath)); + } catch (error) { + return undefined; + } + } + + public async buildCompilation( + sourceUri: string, + openDocuments: OpenDocuments + ): Promise { + if (this.initializeError !== undefined) { + throw new Error(this.initializeError); + } + + const basicCompilation = await buildBasicCompilation( + this, + sourceUri, + openDocuments, + this.configSolcVersion + ); + + const remappings = this.remappings.map( + (remapping) => `${remapping.from}=${remapping.to}` + ); + + (basicCompilation.input.settings as any).remappings = remappings; // CompilerInput type doesn't have remappings + + return basicCompilation; + } + + public async onWatchedFilesChanges({ + changes, + }: DidChangeWatchedFilesParams): Promise { + for (const change of changes) { + const remappingsPath = path.join(this.basePath, "remappings.txt"); + + if ([this.configPath, remappingsPath].some((uri) => change.uri === uri)) { + this.serverState.logger.info( + `Reinitializing foundry project: ${this.id()}` + ); + + await this.initialize(); + } + } + return; + } + + public getImportCompletions( + position: Position, + currentImport: string + ): CompletionItem[] { + return getImportCompletions( + { + remappings: this.remappings, + solFileIndex: this.serverState.solFileIndex, + }, + position, + currentImport + ); + } + + private _parseRemappings(rawRemappings: string) { + const lines = rawRemappings.trim().split("\n"); + const remappings: Remapping[] = []; + + for (const line of lines) { + const lineTokens = line.split("=", 2); + + if ( + lineTokens.length !== 2 || + lineTokens[0].length === 0 || + lineTokens[1].length === 0 + ) { + continue; + } + + const [from, to] = lineTokens.map((token) => + token.endsWith("/") ? token : `${token}/` + ); + + remappings.push({ from, to: path.join(this.basePath, to) }); + } + + return remappings; + } +} diff --git a/server/src/frameworks/Foundry/Remapping.ts b/server/src/frameworks/Foundry/Remapping.ts new file mode 100644 index 00000000..137eb97d --- /dev/null +++ b/server/src/frameworks/Foundry/Remapping.ts @@ -0,0 +1,4 @@ +export interface Remapping { + from: string; + to: string; +} diff --git a/server/src/frameworks/Foundry/getImportCompletions.ts b/server/src/frameworks/Foundry/getImportCompletions.ts new file mode 100644 index 00000000..4bf547e9 --- /dev/null +++ b/server/src/frameworks/Foundry/getImportCompletions.ts @@ -0,0 +1,73 @@ +import { + CompletionItem, + CompletionItemKind, + Position, +} from "vscode-languageserver-types"; +import { SolFileIndexMap } from "../../parser/common/types"; +import { Remapping } from "./Remapping"; + +interface ImportCompletionContext { + remappings: Remapping[]; + solFileIndex: SolFileIndexMap; +} + +export function getImportCompletions( + ctx: ImportCompletionContext, + position: Position, + currentImport: string +): CompletionItem[] { + if (currentImport === "") { + return _getRemappingKeyCompletions(ctx); + } else { + return _getRemappedContractCompletions(ctx, position, currentImport); + } +} + +function _getRemappingKeyCompletions( + ctx: ImportCompletionContext +): CompletionItem[] { + return ctx.remappings.map((r) => { + const strippedKey = r.from.replace(/\/$/, ""); // Remove trailing slash + return { + label: strippedKey, + insertText: strippedKey, + + kind: CompletionItemKind.Module, + documentation: "Imports the package", + }; + }); +} + +function _getRemappedContractCompletions( + ctx: ImportCompletionContext, + position: Position, + currentImport: string +): CompletionItem[] { + let currentImportRemapped = currentImport; + + for (const remapping of ctx.remappings) { + if (currentImportRemapped.startsWith(remapping.from)) { + currentImportRemapped = currentImportRemapped.replace( + remapping.from, + remapping.to + ); + break; + } + } + + const fileUris = Object.keys(ctx.solFileIndex).filter((uri) => + uri.startsWith(currentImportRemapped) + ); + + const completionLabels = fileUris.map((uri) => + uri.replace(currentImportRemapped, "") + ); + + return completionLabels.map((label) => ({ + label, + insertText: label, + + kind: CompletionItemKind.File, + documentation: "Imports the package", + })); +} diff --git a/server/src/frameworks/Hardhat/HardhatIndexer.ts b/server/src/frameworks/Hardhat/HardhatIndexer.ts new file mode 100644 index 00000000..d3f801cd --- /dev/null +++ b/server/src/frameworks/Hardhat/HardhatIndexer.ts @@ -0,0 +1,25 @@ +import path from "path"; +import { WorkspaceFolder } from "vscode-languageserver-protocol"; +import { decodeUriAndRemoveFilePrefix } from "../../utils"; +import { ProjectIndexer } from "../base/ProjectIndexer"; +import { HardhatProject } from "./HardhatProject"; + +export class HardhatIndexer extends ProjectIndexer { + public async index(folder: WorkspaceFolder) { + const uri = decodeUriAndRemoveFilePrefix(folder.uri); + const configFiles = await this.fileRetriever.findFiles( + uri, + "**/hardhat.config.{ts,js}", + ["**/node_modules/**"] + ); + + return configFiles.map( + (configFile) => + new HardhatProject( + this.serverState, + path.dirname(configFile), + configFile + ) + ); + } +} diff --git a/server/src/frameworks/Hardhat/HardhatProject.ts b/server/src/frameworks/Hardhat/HardhatProject.ts new file mode 100644 index 00000000..0bff5a88 --- /dev/null +++ b/server/src/frameworks/Hardhat/HardhatProject.ts @@ -0,0 +1,372 @@ +/* eslint-disable @typescript-eslint/no-explicit-any */ +/* eslint-disable @typescript-eslint/no-non-null-assertion */ + +import { ChildProcess, fork } from "child_process"; +import _ from "lodash"; +import path from "path"; +import { + CompletionItem, + DidChangeWatchedFilesParams, + Position, +} from "vscode-languageserver-protocol"; +import { OpenDocuments, ServerState } from "../../types"; +import { directoryContains } from "../../utils/directoryContains"; +import { Logger } from "../../utils/Logger"; +import { CompilationDetails } from "../base/CompilationDetails"; +import { Project } from "../base/Project"; +import { getImportCompletions } from "./getImportCompletions"; +import { LogLevel } from "./worker/WorkerLogger"; +import { + BuildCompilationRequest, + BuildCompilationResponse, + ErrorResponseMessage, + FileBelongsRequest, + FileBelongsResponse, + InitializationFailureMessage, + InvalidateBuildCacheMessage, + LogMessage, + Message, + MessageType, + ResolveImportRequest, + ResolveImportResponse, +} from "./worker/WorkerProtocol"; + +export enum WorkerStatus { + UNINITIALIZED, + INITIALIZING, + RUNNING, + ERRORED, +} + +const REQUEST_TIMEOUT = 5000; + +export class HardhatProject extends Project { + public priority = 2; + + public workerProcess?: ChildProcess; + + public workerStatus = WorkerStatus.UNINITIALIZED; + + public workerLoadFailureReason = ""; + + private _onInitialized!: () => void; + + private requestId = 0; + + private _onResponse: { [requestId: number]: (result: any) => void } = {}; + private _onError: { [requestId: number]: (result: any) => void } = {}; + + private logger: Logger; + + private name: string; + + constructor( + serverState: ServerState, + basePath: string, + public configPath: string + ) { + super(serverState, basePath); + this.logger = _.clone(serverState.logger); + this.name = path.basename(basePath); + this.logger.tag = `${this.name}`; + } + + public id(): string { + return this.configPath; + } + + public frameworkName(): string { + return "Hardhat"; + } + + public async initialize(): Promise { + return new Promise((resolve, _reject) => { + this._onInitialized = resolve; + + // Fork WorkerProcess as child process + this.workerProcess = fork( + path.resolve(__dirname, "worker/WorkerProcess.js"), + { + cwd: this.basePath, + detached: true, + execArgv: [], + } + ); + this.workerStatus = WorkerStatus.INITIALIZING; + + this.workerProcess.on("message", async (message: Message) => { + try { + await this._handleMessage(message); + } catch (error) { + this.serverState.telemetry.captureException(error); + this.logger.error( + `Error while handling worker message: ${error}. Full Message: ${JSON.stringify( + message + )}` + ); + } + }); + + this.workerProcess.on("error", (err) => { + this.logger.error(err); + }); + + this.workerProcess.on("exit", async (code) => { + this.logger.error(`Child process exited: ${code}`); + this.workerStatus = WorkerStatus.ERRORED; + this._onInitialized(); + }); + }); + } + + public async fileBelongs(sourceURI: string): Promise { + const workerPromise = new Promise((resolve, reject) => { + this._checkWorkerExists(); + this._checkWorkerNotInitializing(); + + if (this.workerStatus === WorkerStatus.RUNNING) { + // HRE was loaded successfully. Delegate to the worker that will use the configured sources path + const requestId = this._prepareRequest(resolve, reject); + + this.workerProcess!.send(new FileBelongsRequest(requestId, sourceURI)); + } else { + // HRE could not be loaded. Claim ownership of all solidity files under root folder + // This is to avoid potential hardhat-owned contracts being assigned to i.e. projectless + resolve(directoryContains(this.basePath, sourceURI)); + } + }); + + return Promise.race([ + workerPromise, + this._requestTimeout("fileBelongs"), + ]) as Promise; + } + + public async buildCompilation( + sourceUri: string, + openDocuments: OpenDocuments + ): Promise { + const workerPromise = new Promise((resolve, reject) => { + this._checkWorkerExists(); + this._checkWorkerNotInitializing(); + this._checkWorkerNotErrored(); + + const requestId = this._prepareRequest(resolve, reject); + + this.workerProcess!.send( + new BuildCompilationRequest(requestId, sourceUri, openDocuments) + ); + }); + + return Promise.race([ + workerPromise, + this._requestTimeout("buildCompilation"), + ]) as Promise; + } + + private _prepareRequest( + resolve: (value: unknown) => void, + reject: (reason?: any) => void + ): number { + this.requestId++; + + this._onResponse[this.requestId] = resolve; + this._onError[this.requestId] = reject; + + return this.requestId; + } + + public async onWatchedFilesChanges({ + changes, + }: DidChangeWatchedFilesParams): Promise { + for (const change of changes) { + if (change.uri.endsWith(".sol")) { + this.workerProcess?.send(new InvalidateBuildCacheMessage()); + } else if (change.uri === this.configPath) { + this.logger.info(`Config file changed. Restarting worker process.`); + + // Kill existing worker process + this.workerProcess?.kill("SIGKILL"); + + // Spawn new worker process + await this.initialize(); + } + } + } + + public async resolveImportPath(from: string, importPath: string) { + const workerPromise = new Promise((resolve, reject) => { + this._checkWorkerExists(); + this._checkWorkerIsRunning(); + + // HRE was loaded successfully. Delegate to the worker that will use the configured sources path + const requestId = this._prepareRequest(resolve, reject); + + this.workerProcess!.send( + new ResolveImportRequest(requestId, from, importPath, this.basePath) + ); + }); + + return Promise.race([ + workerPromise, + this._requestTimeout("fileBelongs"), + ]) as Promise; + } + + public invalidateBuildCache() { + this.workerProcess?.send(new InvalidateBuildCacheMessage()); + } + + public getImportCompletions( + position: Position, + currentImport: string + ): CompletionItem[] { + return getImportCompletions( + { basePath: this.basePath, solFileIndex: this.serverState.solFileIndex }, + position, + currentImport + ); + } + + private _requestTimeout(label: string) { + return new Promise((_resolve, reject) => { + setTimeout(() => { + reject(`Request (${label}) timed out`); + }, REQUEST_TIMEOUT); + }); + } + + private async _handleMessage(message: Message) { + switch (message.type) { + case MessageType.INITIALIZED: + this._handleInitialized(); + break; + + case MessageType.LOG: + this._handleLog(message as LogMessage); + break; + + case MessageType.ERROR_RESPONSE: + this._handleErrorResponse(message as ErrorResponseMessage); + break; + + case MessageType.FILE_BELONGS_RESPONSE: + this._handleFileBelongsResponse(message as FileBelongsResponse); + break; + + case MessageType.RESOLVE_IMPORT_RESPONSE: + this._handleResolveImportResponse(message as ResolveImportResponse); + break; + + case MessageType.BUILD_COMPILATION_RESPONSE: + this._handleBuildCompilationResponse( + message as BuildCompilationResponse + ); + break; + + case MessageType.INITIALIZATION_FAILURE: + this._handleInitializationFailure( + message as InitializationFailureMessage + ); + break; + + default: + this.logger.error( + `Unknown message received from worker: ${JSON.stringify(message)}` + ); + break; + } + } + + private _handleLog(message: LogMessage) { + switch (message.level) { + case LogLevel.TRACE: + this.logger.trace(message.logMessage); + break; + case LogLevel.INFO: + this.logger.info(message.logMessage); + break; + case LogLevel.ERROR: + this.logger.error(message.logMessage); + break; + } + } + + private _handleInitialized() { + this.workerStatus = WorkerStatus.RUNNING; + this.logger.info("Local HRE loaded"); + this.serverState.connection.sendNotification("custom/worker-initialized", { + projectBasePath: this.basePath, + }); + this._onInitialized(); + } + + private _handleInitializationFailure(msg: InitializationFailureMessage) { + this.workerLoadFailureReason = msg.reason; + } + + private _handleErrorResponse(msg: ErrorResponseMessage) { + const errorFunction = this._onError[msg.requestId]; + + if (errorFunction === undefined) { + this.logger.error( + `Error function not found for request id ${msg.requestId}` + ); + } else { + delete this._onError[msg.requestId]; + delete this._onResponse[msg.requestId]; + errorFunction(msg.error); + } + } + + private _handleFileBelongsResponse(msg: FileBelongsResponse) { + this._handleResponse(msg.requestId, msg.belongs); + } + + private _handleResolveImportResponse(msg: ResolveImportResponse) { + this._onResponse[msg.requestId](msg.path); + } + + private _handleBuildCompilationResponse(msg: BuildCompilationResponse) { + this._handleResponse(msg.requestId, msg.compilationDetails); + } + + private _handleResponse(requestId: number, result: any) { + const resolveFunction = this._onResponse[requestId]; + if (resolveFunction === undefined) { + this.logger.error( + `Resolve function not found for request id ${requestId}` + ); + } else { + delete this._onResponse[requestId]; + delete this._onError[requestId]; + resolveFunction(result); + } + } + + private _checkWorkerExists() { + if (this.workerProcess === undefined) { + throw new Error("Worker process not spawned"); + } + } + + private _checkWorkerNotInitializing() { + if (this.workerStatus === WorkerStatus.INITIALIZING) { + throw new Error("Worker is initializing"); + } + } + + private _checkWorkerIsRunning() { + if (this.workerStatus !== WorkerStatus.RUNNING) { + throw new Error( + `Worker is not running. Status: ${this.workerStatus}, error: ${this.workerLoadFailureReason}` + ); + } + } + + private _checkWorkerNotErrored() { + if (this.workerStatus === WorkerStatus.ERRORED) { + throw new Error(this.workerLoadFailureReason); + } + } +} diff --git a/server/src/frameworks/Hardhat/getImportCompletions.ts b/server/src/frameworks/Hardhat/getImportCompletions.ts new file mode 100644 index 00000000..b99cf5e6 --- /dev/null +++ b/server/src/frameworks/Hardhat/getImportCompletions.ts @@ -0,0 +1,106 @@ +import path from "path"; +import { + CompletionItem, + CompletionItemKind, + Position, +} from "vscode-languageserver-types"; +import { SolFileIndexMap } from "../../parser/common/types"; +import { replaceFor } from "../../services/completion/getImportPathCompletion"; +import { normalizeSlashes, toUnixStyle } from "../../utils"; + +interface ImportCompletionContext { + basePath: string; + solFileIndex: SolFileIndexMap; +} + +export function getImportCompletions( + ctx: ImportCompletionContext, + position: Position, + currentImport: string +): CompletionItem[] { + const contractFilePaths = + currentImport.includes("/") || currentImport.includes(path.sep) + ? _findNodeModulesContractFilesInIndex(ctx, currentImport) + : _findNodeModulePackagesInIndex(ctx); + + return contractFilePaths.map((pathFromNodeModules): CompletionItem => { + const normalizedPath = normalizeSlashes(pathFromNodeModules); + + const completionItem: CompletionItem = { + label: normalizedPath, + textEdit: replaceFor(normalizedPath, position, currentImport), + + kind: CompletionItemKind.Module, + documentation: "Imports the package", + }; + + return completionItem; + }); +} + +function _findNodeModulesContractFilesInIndex( + ctx: ImportCompletionContext, + currentImport: string +): string[] { + const nodeModulesPaths = _resolvePotentialNodeModulesPaths(ctx); + + let allContractFilePaths: string[] = []; + for (const nodeModulesPath of nodeModulesPaths) { + const basePath = toUnixStyle(path.join(nodeModulesPath, path.sep)); + + const basePathWithCurrentImport = toUnixStyle( + path.join(basePath, currentImport) + ); + + const contractFilePaths = Object.keys(ctx.solFileIndex) + .filter((fullPath) => fullPath.startsWith(basePathWithCurrentImport)) + .map((fullPath) => fullPath.replace(basePath, "")); + + allContractFilePaths = allContractFilePaths.concat(contractFilePaths); + } + + return allContractFilePaths; +} + +function _resolvePotentialNodeModulesPaths( + ctx: ImportCompletionContext +): string[] { + let current = ctx.basePath; + const nodeModulesPaths = []; + + while (current !== "/") { + const previous = current; + + const potentialPath = toUnixStyle(path.join(current, "node_modules")); + nodeModulesPaths.push(potentialPath); + + current = path.resolve(current, ".."); + + if (previous === current) { + break; + } + } + + return nodeModulesPaths; +} + +function _findNodeModulePackagesInIndex( + ctx: ImportCompletionContext +): string[] { + const nodeModulePaths = _resolvePotentialNodeModulesPaths(ctx); + + let modulePackages: string[] = []; + for (const nodeModulesPath of nodeModulePaths) { + const allNodeModulePaths = Object.keys(ctx.solFileIndex) + .filter((p) => p.startsWith(nodeModulesPath)) + .map((p) => p.replace(nodeModulesPath, "")); + + const uniqueFolders = Array.from( + new Set(allNodeModulePaths.map((p) => p.split("/")[1])) + ); + + modulePackages = modulePackages.concat(uniqueFolders); + } + + return Array.from(new Set(modulePackages)); +} diff --git a/server/src/frameworks/Hardhat/worker/WorkerLogger.ts b/server/src/frameworks/Hardhat/worker/WorkerLogger.ts new file mode 100644 index 00000000..8e6e54f1 --- /dev/null +++ b/server/src/frameworks/Hardhat/worker/WorkerLogger.ts @@ -0,0 +1,28 @@ +import { WorkerProcess } from "./WorkerProcess"; +import { LogMessage } from "./WorkerProtocol"; + +export class WorkerLogger { + constructor(private _workerProcess: WorkerProcess) {} + + public trace(msg: string) { + this._log(msg, LogLevel.TRACE); + } + + public info(msg: string) { + this._log(msg, LogLevel.INFO); + } + + public error(msg: string) { + this._log(msg, LogLevel.ERROR); + } + + private _log(msg: string, level: LogLevel) { + void this._workerProcess.send(new LogMessage(msg, level)); + } +} + +export enum LogLevel { + TRACE, + INFO, + ERROR, +} diff --git a/server/src/frameworks/Hardhat/worker/WorkerProcess.ts b/server/src/frameworks/Hardhat/worker/WorkerProcess.ts new file mode 100644 index 00000000..c617b7db --- /dev/null +++ b/server/src/frameworks/Hardhat/worker/WorkerProcess.ts @@ -0,0 +1,449 @@ +/* eslint-disable @typescript-eslint/no-var-requires */ +/* eslint-disable @typescript-eslint/no-explicit-any */ +import { AnalysisResult, analyze } from "@nomicfoundation/solidity-analyzer"; +import { + ActionType, + CompilationJob, + HardhatRuntimeEnvironment, +} from "hardhat/types"; +import path from "path"; +import { isDeepStrictEqual } from "util"; +import { + TASK_COMPILE_SOLIDITY_GET_COMPILATION_JOB_FOR_FILE, + TASK_COMPILE_SOLIDITY_GET_COMPILER_INPUT, + TASK_COMPILE_SOLIDITY_GET_DEPENDENCY_GRAPH, + TASK_COMPILE_SOLIDITY_GET_SOURCE_NAMES, + TASK_COMPILE_SOLIDITY_GET_SOURCE_PATHS, + TASK_COMPILE_SOLIDITY_READ_FILE, +} from "hardhat/builtin-tasks/task-names"; +import { SolidityFilesCache } from "hardhat/builtin-tasks/utils/solidity-files-cache"; +import { HardhatError } from "hardhat/internal/core/errors"; +import { realpathSync } from "fs"; +import { CompilationDetails } from "../../base/CompilationDetails"; +import { toUnixStyle, uriEquals } from "../../../utils"; +import { directoryContains } from "../../../utils/directoryContains"; +import { BuildInputError } from "../../base/Errors"; +import { WorkerLogger } from "./WorkerLogger"; +import { + BuildCompilationRequest, + BuildCompilationResponse, + ErrorResponseMessage, + FileBelongsRequest, + FileBelongsResponse, + InitializationFailureMessage, + Message, + MessageType, + ResolveImportRequest, + ResolveImportResponse, +} from "./WorkerProtocol"; + +delete process.env.HARDHAT_CONFIG; // remove hack from parent process + +export class WorkerProcess { + private logger: WorkerLogger; + + private hre!: HardhatRuntimeEnvironment; + private solidityFilesCache!: typeof SolidityFilesCache; + private solidityFilesCachePath!: string; + private originalReadFileAction!: ActionType<{ absolutePath: string }>; + + // private resolvedFiles?: { [sourcePath: string]: ResolvedFile }; + + private lastAnalyzedDocUri?: string; + private lastAnalysis?: AnalysisResult; + private lastCompilationDetails?: CompilationDetails; + + constructor() { + this.logger = new WorkerLogger(this); + } + + public async start() { + process.on("message", async (msg) => { + try { + await this._handleMessage(msg); + } catch (error: any) { + const errorData = error instanceof Error ? error.message : error; + + if (msg.requestId) { + await this.send(new ErrorResponseMessage(msg.requestId, errorData)); + } + } + }); + + try { + this._loadHRE(); + } catch (err: any) { + this.logger.error(`Error loading HRE: ${err}`); + let errorMessage; + if (err.message.includes("Cannot find module 'hardhat'")) { + errorMessage = + "Couldn't find local hardhat module. Make sure project dependencies are installed."; + } else { + errorMessage = err.message; + } + await this.send(new InitializationFailureMessage(errorMessage)); + process.exit(1); + } + + await this.send({ type: MessageType.INITIALIZED }); + } + + private _loadHRE() { + const hardhatBase = path.resolve( + require.resolve("hardhat", { paths: [process.cwd()] }), + "..", + "..", + ".." + ); + + require(`${hardhatBase}/register.js`); + + // Load project's local HRE through require + this.hre = require(`${hardhatBase}/internal/lib/hardhat-lib.js`); + + // Load local cache through require + const cacheModule = require(`${hardhatBase}/builtin-tasks/utils/solidity-files-cache`); + + this.solidityFilesCachePath = cacheModule.getSolidityFilesCachePath( + this.hre.config.paths + ); + this.solidityFilesCache = cacheModule.SolidityFilesCache; + + // Store original READ_FILE action, which we override + this.originalReadFileAction = + this.hre.tasks[TASK_COMPILE_SOLIDITY_READ_FILE].action; + } + + private async _handleMessage(msg: Message) { + switch (msg.type) { + case MessageType.FILE_BELONGS_REQUEST: + await this._handleFileBelongs(msg as FileBelongsRequest); + break; + case MessageType.RESOLVE_IMPORT_REQUEST: + await this._handleResolveImport(msg as ResolveImportRequest); + break; + case MessageType.BUILD_COMPILATION_REQUEST: + await this._handleCompilationRequest(msg as BuildCompilationRequest); + break; + case MessageType.INVALIDATE_BUILD_CACHE: + this._handleInvalidateCache(); + break; + default: + break; + } + } + private _handleInvalidateCache() { + this._clearBuildCache(); + } + + private _clearBuildCache() { + this.lastAnalysis = undefined; + this.lastCompilationDetails = undefined; + this.lastAnalyzedDocUri = undefined; + } + + private async _handleFileBelongs({ requestId, uri }: FileBelongsRequest) { + const sourcesPath = this.hre.config.paths.sources; + const nodeModulesPath = path.join( + this.hre.config.paths.root, + "node_modules" + ); + const belongs = + directoryContains(sourcesPath, uri) || + directoryContains(nodeModulesPath, uri); + + await this.send(new FileBelongsResponse(requestId, belongs)); + } + + private async _handleResolveImport({ + requestId, + from, + importPath, + projectBasePath, + }: ResolveImportRequest) { + const transformTaskName = "compile:solidity:transform-import-name"; + + let finalImportPath = importPath; + if (this.hre.tasks[transformTaskName] !== undefined) { + finalImportPath = await this.hre.run(transformTaskName, { + importName: importPath, + }); + } + + let resolvedPath: string | undefined; + // 1st try: resolve as local path, i.e. `foo/bar.sol` + try { + const requiredPath = require.resolve(finalImportPath, { + paths: [realpathSync(path.dirname(from))], + }); + + resolvedPath = toUnixStyle(realpathSync(requiredPath)); + } catch (error) { + // 2nd try: resolve as relative path to project root, i.e. `./foo/bar.sol` + try { + const requiredPath = require.resolve(`./${finalImportPath}`, { + paths: [projectBasePath], + }); + + resolvedPath = toUnixStyle(realpathSync(requiredPath)); + } catch (innerError) { + resolvedPath = undefined; + } + } + + await this.send(new ResolveImportResponse(requestId, resolvedPath)); + } + + private async _handleCompilationRequest(request: BuildCompilationRequest) { + // Send solc input back to LSP + const compilationDetails = await this._getCompilationDetails(request); + await this.send( + new BuildCompilationResponse(request.requestId, compilationDetails) + ); + } + + private async _getCompilationDetails( + request: BuildCompilationRequest + ): Promise { + const { sourceUri, openDocuments } = request; + + // Check source file to build is included in openDocuments + const documentText = openDocuments.find( + (doc) => doc.uri === sourceUri + )?.documentText; + + if (documentText === undefined) { + throw new Error( + `sourceUri (${sourceUri}) should be included in openDocuments ${JSON.stringify( + openDocuments.map((doc) => doc.uri) + )} ` + ); + } + + try { + // Analyze imports + const analysis = analyze(documentText); + + // Avoid rebuilding dependency graph. If imports didnt change, just update open documents' contents + if ( + sourceUri === this.lastAnalyzedDocUri && + isDeepStrictEqual(analysis, this.lastAnalysis) && + this.lastCompilationDetails !== undefined + ) { + for (const openDocument of openDocuments) { + const normalizedUri = toUnixStyle(openDocument.uri); + const sourceName = Object.keys( + this.lastCompilationDetails.input.sources + ).find((k) => normalizedUri.endsWith(k)); + + if (sourceName !== undefined) { + this.lastCompilationDetails.input.sources[sourceName] = { + content: openDocument.documentText, + }; + } + } + return this.lastCompilationDetails; + } + + // Override task + this.hre.tasks[TASK_COMPILE_SOLIDITY_READ_FILE].setAction( + async ( + args: { absolutePath: string }, + hre: HardhatRuntimeEnvironment, + runSuper + ) => { + const uri = toUnixStyle(args.absolutePath); + + const openDoc = openDocuments.find((doc) => doc.uri === uri); + + if (openDoc !== undefined) { + return openDoc.documentText; + } + + return this.originalReadFileAction(args, hre, runSuper); + } + ); + + const sourcePaths = await this.hre.run( + TASK_COMPILE_SOLIDITY_GET_SOURCE_PATHS + ); + + const sourceNames = ( + await this.hre.run(TASK_COMPILE_SOLIDITY_GET_SOURCE_NAMES, { + sourcePaths, + }) + ).filter((sourceName: string) => sourceUri.endsWith(sourceName)); + + const solidityFilesCache = await this.solidityFilesCache.readFromFile( + this.solidityFilesCachePath + ); + + const dependencyGraph = await this.hre.run( + TASK_COMPILE_SOLIDITY_GET_DEPENDENCY_GRAPH, + { sourceNames, solidityFilesCache } + ); + + const file = dependencyGraph + .getResolvedFiles() + .filter((f: { absolutePath: string }) => + uriEquals(toUnixStyle(f.absolutePath), sourceUri) + )[0]; + + if (file === undefined) { + throw new Error( + `File ${sourceUri} not found in sourceNames ${sourceNames}` + ); + } + + const compilationJob = await this.hre.run( + TASK_COMPILE_SOLIDITY_GET_COMPILATION_JOB_FOR_FILE, + { + file, + dependencyGraph, + } + ); + + if (compilationJob.reason) { + this.logger.trace( + `[WORKER] Compilation job failed ${compilationJob.reason}` + ); + + throw new Error(compilationJob.reason); + } + + const modifiedFiles = { + [sourceUri]: documentText, + }; + + for (const unsavedDocument of openDocuments) { + modifiedFiles[unsavedDocument.uri] = unsavedDocument.documentText; + } + + compilationJob + .getResolvedFiles() + .forEach( + (resolvedFile: { + absolutePath: string; + content: { rawContent: string }; + }) => { + const normalizeAbsPath = toUnixStyle(resolvedFile.absolutePath); + + if (modifiedFiles[normalizeAbsPath]) { + resolvedFile.content.rawContent = modifiedFiles[normalizeAbsPath]; + } + } + ); + + const input = await this.hre.run( + TASK_COMPILE_SOLIDITY_GET_COMPILER_INPUT, + { + compilationJob, + } + ); + + const compilationDetails = { + input, + solcVersion: (compilationJob as CompilationJob).getSolcConfig().version, + }; + + this.lastCompilationDetails = compilationDetails; + this.lastAnalysis = analysis; + this.lastAnalyzedDocUri = sourceUri; + + return compilationDetails; + } catch (error: any) { + this._clearBuildCache(); + + // Translate hardhat error into BuildInputError. Pass other errors through + if (HardhatError.isHardhatError(error)) { + const IMPORT_FILE_ERROR_CODES = [404, 405, 406, 407, 408, 409, 412]; + const IMPORT_LIBRARY_ERROR_CODES = [411]; + + const { title, message, description, number } = error.errorDescriptor; + + const buildError: BuildInputError = { + _isBuildInputError: true, + fileSpecificErrors: {}, + projectWideErrors: [], + }; + + let importString: string | null; + + if (IMPORT_FILE_ERROR_CODES.includes(error.errorDescriptor.number)) { + importString = error.messageArguments.imported; + } else if ( + IMPORT_LIBRARY_ERROR_CODES.includes(error.errorDescriptor.number) + ) { + importString = error.messageArguments.library; + } else { + importString = null; + } + + if (importString === null) { + buildError.projectWideErrors = [ + { + type: "general", + message: `${title}: ${message}. ${description}`, + source: "hardhat", + code: number, + }, + ]; + } else { + // Get uri of file with import error + const errorFileUri = path.join( + this.hre.config.paths.root, + error.messageArguments.from + ); + + // If file is in open docs, get the position of the error + let startOffset; + let endOffset; + const errorDoc = openDocuments.find( + (doc) => doc.uri === errorFileUri + ); + if (errorDoc !== undefined) { + const errorDocText = errorDoc.documentText; + startOffset = errorDocText.indexOf(importString); + endOffset = startOffset + importString.length; + } + + buildError.fileSpecificErrors[errorFileUri] = [ + { + startOffset, + endOffset, + error: { + type: "import", + source: "hardhat", + message: title, + code: number, + }, + }, + ]; + } + + throw buildError; + } else { + throw error; + } + } + } + + public async send(msg: Message) { + return new Promise((resolve, reject) => { + if (!process.send) { + return; + } + + process.send(msg, (err: unknown) => { + if (err) { + return reject(err); + } + + resolve(); + }); + }); + } +} + +// eslint-disable-next-line @typescript-eslint/no-floating-promises +new WorkerProcess().start(); diff --git a/server/src/frameworks/Hardhat/worker/WorkerProtocol.ts b/server/src/frameworks/Hardhat/worker/WorkerProtocol.ts new file mode 100644 index 00000000..987cd1b2 --- /dev/null +++ b/server/src/frameworks/Hardhat/worker/WorkerProtocol.ts @@ -0,0 +1,126 @@ +/* eslint-disable @typescript-eslint/no-explicit-any */ +import { OpenDocuments } from "../../../types"; +import { CompilationDetails } from "../../base/CompilationDetails"; +import { LogLevel } from "./WorkerLogger"; + +export enum MessageType { + INITIALIZED, + LOG, + ERROR_RESPONSE, + FILE_BELONGS_REQUEST, + FILE_BELONGS_RESPONSE, + RESOLVE_IMPORT_REQUEST, + RESOLVE_IMPORT_RESPONSE, + BUILD_COMPILATION_REQUEST, + BUILD_COMPILATION_RESPONSE, + INITIALIZATION_FAILURE, + INVALIDATE_BUILD_CACHE, +} + +export abstract class Message { + public abstract type: MessageType; +} + +export abstract class RequestMessage extends Message { + constructor(public requestId: number) { + super(); + } +} + +export abstract class ResponseMessage extends Message { + constructor(public requestId: number) { + super(); + } +} + +export class ErrorResponseMessage extends ResponseMessage { + public type = MessageType.ERROR_RESPONSE; + + constructor(requestId: number, public error: any) { + super(requestId); + } +} + +export class InitializedMessage extends Message { + public type = MessageType.INITIALIZED; +} + +export class LogMessage extends Message { + public type = MessageType.LOG; + + constructor(public logMessage: string, public level: LogLevel) { + super(); + } +} + +export class FileBelongsRequest extends RequestMessage { + public type = MessageType.FILE_BELONGS_REQUEST; + + constructor(requestId: number, public uri: string) { + super(requestId); + } +} + +export class FileBelongsResponse extends ResponseMessage { + public type = MessageType.FILE_BELONGS_RESPONSE; + constructor(requestId: number, public belongs: boolean) { + super(requestId); + } +} + +export class ResolveImportRequest extends RequestMessage { + public type = MessageType.RESOLVE_IMPORT_REQUEST; + + constructor( + requestId: number, + public from: string, + public importPath: string, + public projectBasePath: string + ) { + super(requestId); + } +} + +export class ResolveImportResponse extends ResponseMessage { + public type = MessageType.RESOLVE_IMPORT_RESPONSE; + constructor(requestId: number, public path: string | undefined) { + super(requestId); + } +} + +export class BuildCompilationRequest extends RequestMessage { + public type = MessageType.BUILD_COMPILATION_REQUEST; + constructor( + requestId: number, + public sourceUri: string, + public openDocuments: OpenDocuments + ) { + super(requestId); + } +} + +export class BuildCompilationResponse extends ResponseMessage { + public type = MessageType.BUILD_COMPILATION_RESPONSE; + constructor( + requestId: number, + public compilationDetails: CompilationDetails + ) { + super(requestId); + } +} + +export class InitializationFailureMessage extends Message { + public type = MessageType.INITIALIZATION_FAILURE; + + constructor(public reason: string) { + super(); + } +} + +export class InvalidateBuildCacheMessage extends Message { + public type = MessageType.INVALIDATE_BUILD_CACHE; + + constructor() { + super(); + } +} diff --git a/server/src/frameworks/Projectless/ProjectlessIndexer.ts b/server/src/frameworks/Projectless/ProjectlessIndexer.ts new file mode 100644 index 00000000..8c124806 --- /dev/null +++ b/server/src/frameworks/Projectless/ProjectlessIndexer.ts @@ -0,0 +1,9 @@ +import { WorkspaceFolder } from "vscode-languageserver-protocol"; +import { ProjectIndexer } from "../base/ProjectIndexer"; +import { ProjectlessProject } from "./ProjectlessProject"; + +export class ProjectlessIndexer extends ProjectIndexer { + public async index(folder: WorkspaceFolder) { + return [new ProjectlessProject(this.serverState, folder.uri)]; + } +} diff --git a/server/src/frameworks/Projectless/ProjectlessProject.ts b/server/src/frameworks/Projectless/ProjectlessProject.ts new file mode 100644 index 00000000..61d8ab3c --- /dev/null +++ b/server/src/frameworks/Projectless/ProjectlessProject.ts @@ -0,0 +1,61 @@ +import fs from "fs"; +import _ from "lodash"; +import path from "path"; +import { DidChangeWatchedFilesParams } from "vscode-languageserver-protocol"; +import { OpenDocuments, ServerState } from "../../types"; +import { toUnixStyle } from "../../utils"; +import { CompilationDetails } from "../base/CompilationDetails"; +import { Project } from "../base/Project"; +import { buildBasicCompilation } from "../shared/buildBasicCompilation"; + +export class ProjectlessProject extends Project { + // These are in place just to implement ISolProject. Should be removed after refactor + public configPath = undefined; + + public priority = 0; + + constructor(serverState: ServerState, basePath: string) { + super(serverState, basePath); + } + + public id(): string { + return "projectless"; + } + + public frameworkName(): string { + return "Projectless"; + } + + public async initialize(): Promise { + return; + } + + public async fileBelongs(_file: string) { + return true; + } + + public async resolveImportPath(file: string, importPath: string) { + try { + const resolvedPath = require.resolve(importPath, { + paths: [fs.realpathSync(path.dirname(file))], + }); + + return toUnixStyle(fs.realpathSync(resolvedPath)); + } catch (error) { + return undefined; + } + } + + public async buildCompilation( + sourceUri: string, + openDocuments: OpenDocuments + ): Promise { + return buildBasicCompilation(this, sourceUri, openDocuments); + } + + public async onWatchedFilesChanges( + _params: DidChangeWatchedFilesParams + ): Promise { + return; + } +} diff --git a/server/src/frameworks/base/CompilationDetails.ts b/server/src/frameworks/base/CompilationDetails.ts new file mode 100644 index 00000000..cdd142d6 --- /dev/null +++ b/server/src/frameworks/base/CompilationDetails.ts @@ -0,0 +1,6 @@ +import { CompilerInput } from "hardhat/types"; + +export interface CompilationDetails { + input: CompilerInput; + solcVersion: string; +} diff --git a/server/src/frameworks/base/Errors.ts b/server/src/frameworks/base/Errors.ts new file mode 100644 index 00000000..ead0e192 --- /dev/null +++ b/server/src/frameworks/base/Errors.ts @@ -0,0 +1,19 @@ +export interface ErrorDetail { + source: string; + code?: number; + type: string; + message: string; +} + +export interface BuildInputError { + _isBuildInputError: true; + + projectWideErrors: ErrorDetail[]; + fileSpecificErrors: { + [uri: string]: Array<{ + startOffset?: number; + endOffset?: number; + error: ErrorDetail; + }>; + }; +} diff --git a/server/src/frameworks/base/Project.ts b/server/src/frameworks/base/Project.ts new file mode 100644 index 00000000..679880e6 --- /dev/null +++ b/server/src/frameworks/base/Project.ts @@ -0,0 +1,52 @@ +import { DidChangeWatchedFilesParams } from "vscode-languageserver-protocol"; +import { CompletionItem, Position } from "vscode-languageserver-types"; +import { OpenDocuments, ServerState } from "../../types"; +import { CompilationDetails } from "./CompilationDetails"; + +export abstract class Project { + constructor(public serverState: ServerState, public basePath: string) {} + + public abstract configPath?: string; + + // Used for when multiple projects match for a sol file + public abstract priority: number; + + // Unique identifier of a project + public abstract id(): string; + + // Check if a solidity file belongs to this project + public abstract fileBelongs(file: string): Promise; + + // Resolve the full path of an import statement + public abstract resolveImportPath( + file: string, + importPath: string + ): Promise; + + // Any tasks the project need to run to be in an operative state + public abstract initialize(): Promise; + + // Given a source file and open docs, get a solc build and version + public abstract buildCompilation( + sourceUri: string, + openDocuments: OpenDocuments + ): Promise; + + // Callback for watched files events + public abstract onWatchedFilesChanges( + params: DidChangeWatchedFilesParams + ): Promise; + + public abstract frameworkName(): string; + + public invalidateBuildCache() { + // to be overriden if necessary + } + + public getImportCompletions( + _position: Position, + _currentImport: string + ): CompletionItem[] { + return []; + } +} diff --git a/server/src/frameworks/base/ProjectIndexer.ts b/server/src/frameworks/base/ProjectIndexer.ts new file mode 100644 index 00000000..ad738a15 --- /dev/null +++ b/server/src/frameworks/base/ProjectIndexer.ts @@ -0,0 +1,13 @@ +import { WorkspaceFolder } from "vscode-languageserver-protocol"; +import { ServerState } from "../../types"; +import { WorkspaceFileRetriever } from "../../utils/WorkspaceFileRetriever"; +import { Project } from "./Project"; + +export abstract class ProjectIndexer { + constructor( + public serverState: ServerState, + public fileRetriever: WorkspaceFileRetriever + ) {} + + public abstract index(folder: WorkspaceFolder): Promise; +} diff --git a/server/src/frameworks/shared/buildBasicCompilation.ts b/server/src/frameworks/shared/buildBasicCompilation.ts new file mode 100644 index 00000000..3a127959 --- /dev/null +++ b/server/src/frameworks/shared/buildBasicCompilation.ts @@ -0,0 +1,77 @@ +import _ from "lodash"; +import semver from "semver"; +import { OpenDocuments } from "../../types"; +import { CompilationDetails } from "../base/CompilationDetails"; +import { Project } from "../base/Project"; +import { getDependenciesAndPragmas } from "./crawlDependencies"; + +export async function buildBasicCompilation( + project: Project, + sourceUri: string, + openDocuments: OpenDocuments, + explicitSolcVersion?: string +): Promise { + // Load contract text from openDocuments + const documentText = openDocuments.find( + (doc) => doc.uri === sourceUri + )?.documentText; + + if (documentText === undefined) { + throw new Error( + `sourceUri (${sourceUri}) should be included in openDocuments ${JSON.stringify( + openDocuments.map((doc) => doc.uri) + )} ` + ); + } + + // Get list of all dependencies (deep) and their pragma statements + const dependencyDetails = await getDependenciesAndPragmas(project, sourceUri); + const pragmas = _.flatten(_.map(dependencyDetails, "pragmas")); + + // Use specified solc version or determine it based on available versions and pragma statements + let solcVersion = explicitSolcVersion; + + if (solcVersion === undefined) { + const resolvedSolcVersion = semver.maxSatisfying( + project.serverState.solcVersions, + pragmas.join(" ") + ); + + if (resolvedSolcVersion === null) { + throw new Error(`No available solc version satisfying ${pragmas}`); + } + + solcVersion = resolvedSolcVersion; + } + + // Build solc input + const filePathsToCompile = _.map(dependencyDetails, "absolutePath"); + const sources: { [uri: string]: { content: string } } = {}; + for (const filePath of filePathsToCompile) { + // Read all sol files via openDocuments or solFileIndex + const contractText = + openDocuments.find((doc) => doc.uri === filePath)?.documentText ?? + project.serverState.solFileIndex[filePath].text; + if (contractText === undefined) { + throw new Error(`Contract not indexed: ${filePath}`); + } + sources[filePath] = { content: contractText }; + } + + sources[sourceUri] = { content: documentText }; + + return { + input: { + language: "Solidity", + sources, + settings: { + outputSelection: {}, + optimizer: { + enabled: false, + runs: 200, + }, + }, + }, + solcVersion, + }; +} diff --git a/server/src/frameworks/shared/crawlDependencies.ts b/server/src/frameworks/shared/crawlDependencies.ts new file mode 100644 index 00000000..2b20457b --- /dev/null +++ b/server/src/frameworks/shared/crawlDependencies.ts @@ -0,0 +1,67 @@ +import { analyze } from "@nomicfoundation/solidity-analyzer"; +import { analyzeSolFile } from "../../parser/analyzer/analyzeSolFile"; +import { getOrInitialiseSolFileEntry } from "../../utils/getOrInitialiseSolFileEntry"; +import { Project } from "../base/Project"; + +interface DependencyDetail { + absolutePath: string; + pragmas: string[]; +} + +export async function getDependenciesAndPragmas( + project: Project, + sourceUri: string, + visited: string[] = [] +): Promise { + if (visited.includes(sourceUri)) { + return []; + } + + let text = project.serverState.solFileIndex[sourceUri]?.text; + + if (text === undefined) { + // TODO: inject this + const solFileEntry = getOrInitialiseSolFileEntry( + project.serverState, + sourceUri + ); + + if (!solFileEntry.isAnalyzed()) { + await analyzeSolFile(project.serverState, solFileEntry); + } + } + + text = project.serverState.solFileIndex[sourceUri]?.text; + + if (text === undefined) { + throw new Error(`Couldnt find/index ${sourceUri}`); + } + + visited.push(sourceUri); + + // Analyze current file for import strings and pragmas + const { imports, versionPragmas } = analyze(text); + + // Build list with current file and prepare for dependencies + const dependencyDetails = [ + { absolutePath: sourceUri, pragmas: versionPragmas }, + ]; + + // Recursively crawl dependencies and append. Skip non-existing imports + const importsUris: string[] = []; + for (const _import of imports) { + const resolvedImport = await project.resolveImportPath(sourceUri, _import); + + if (resolvedImport !== undefined) { + importsUris.push(resolvedImport); + } + } + + for (const importUri of importsUris) { + dependencyDetails.push( + ...(await getDependenciesAndPragmas(project, importUri, visited)) + ); + } + + return dependencyDetails; +} diff --git a/server/src/hardhat.config.ts b/server/src/hardhat.config.ts new file mode 100644 index 00000000..ff8b4c56 --- /dev/null +++ b/server/src/hardhat.config.ts @@ -0,0 +1 @@ +export default {}; diff --git a/server/src/index.ts b/server/src/index.ts index cda62bf1..917808d3 100644 --- a/server/src/index.ts +++ b/server/src/index.ts @@ -2,9 +2,8 @@ import "module-alias/register"; import { createConnection, ProposedFeatures } from "vscode-languageserver/node"; import { ConnectionLogger } from "@utils/Logger"; -import { WorkspaceFileRetriever } from "@analyzer/WorkspaceFileRetriever"; +import { WorkspaceFileRetriever } from "@utils/WorkspaceFileRetriever"; import setupServer from "./server"; -import { compilerProcessFactory } from "./services/validation/compilerProcessFactory"; import { SentryServerTelemetry } from "./telemetry/SentryServerTelemetry"; import { GoogleAnalytics } from "./analytics/GoogleAnalytics"; @@ -29,13 +28,7 @@ const telemetry = new SentryServerTelemetry( ); const logger = new ConnectionLogger(connection, telemetry); -setupServer( - connection, - compilerProcessFactory, - workspaceFileRetriever, - telemetry, - logger -); +setupServer(connection, workspaceFileRetriever, telemetry, logger); // Listen on the connection connection.listen(); diff --git a/server/src/parser/analyzer/HardhatProject.ts b/server/src/parser/analyzer/HardhatProject.ts deleted file mode 100644 index 5896f948..00000000 --- a/server/src/parser/analyzer/HardhatProject.ts +++ /dev/null @@ -1,19 +0,0 @@ -import { ISolProject, Remapping } from "@common/types"; -import { WorkspaceFolder } from "vscode-languageserver-protocol"; - -export function isHardhatProject( - project: ISolProject -): project is HardhatProject { - return project.type === "hardhat"; -} - -export class HardhatProject implements ISolProject { - public type: "hardhat" = "hardhat"; - - constructor( - public basePath: string, - public configPath: string, - public workspaceFolder: WorkspaceFolder, - public remappings: Remapping[] = [] - ) {} -} diff --git a/server/src/parser/analyzer/NoProject.ts b/server/src/parser/analyzer/NoProject.ts deleted file mode 100644 index ef2e824a..00000000 --- a/server/src/parser/analyzer/NoProject.ts +++ /dev/null @@ -1,16 +0,0 @@ -import { ISolProject, SolProjectType } from "@common/types"; -import { WorkspaceFolder } from "vscode-languageserver-protocol"; - -export function isNoProject(project: ISolProject): project is NoProject { - return project.type === "none"; -} - -export class NoProject implements ISolProject { - public type: SolProjectType = "none"; - public basePath = ""; - public configPath = ""; - public workspaceFolder: WorkspaceFolder = { - name: "none", - uri: "", - }; -} diff --git a/server/src/parser/analyzer/SolFileEntry.ts b/server/src/parser/analyzer/SolFileEntry.ts index 314b9162..38526499 100644 --- a/server/src/parser/analyzer/SolFileEntry.ts +++ b/server/src/parser/analyzer/SolFileEntry.ts @@ -6,28 +6,25 @@ import { EmptyNode, Searcher as ISearcher, SolFileState, - ISolProject, - ClientTrackingState, } from "@common/types"; +import { Project } from "../../frameworks/base/Project"; export class SolFileEntry implements ISolFileEntry { public uri: string; - public project: ISolProject; + public project: Project; public text: string | undefined; public status: SolFileState; - public tracking: ClientTrackingState; public ast: ASTNode | undefined; public analyzerTree: { tree: Node }; public searcher: ISearcher; public orphanNodes: Node[] = []; - private constructor(uri: string, project: ISolProject) { + private constructor(uri: string, project: Project) { this.uri = uri; this.project = project; this.text = ""; this.status = SolFileState.UNLOADED; - this.tracking = ClientTrackingState.UNTRACKED; this.analyzerTree = { tree: new EmptyNode( @@ -41,13 +38,13 @@ export class SolFileEntry implements ISolFileEntry { this.searcher = new Searcher(this.analyzerTree); } - public static createUnloadedEntry(uri: string, project: ISolProject) { + public static createUnloadedEntry(uri: string, project: Project) { return new SolFileEntry(uri, project); } - public static createLoadedUntrackedEntry( + public static createLoadedEntry( uri: string, - project: ISolProject, + project: Project, text: string ): ISolFileEntry { const unloaded = new SolFileEntry(uri, project); @@ -55,16 +52,6 @@ export class SolFileEntry implements ISolFileEntry { return unloaded.loadText(text); } - public static createLoadedTrackedEntry( - uri: string, - project: ISolProject, - text: string - ): ISolFileEntry { - const unloaded = new SolFileEntry(uri, project); - const loaded = unloaded.loadText(text); - return loaded.track(); - } - public loadText(text: string) { this.status = SolFileState.LOADED; this.text = text; @@ -72,18 +59,6 @@ export class SolFileEntry implements ISolFileEntry { return this; } - public track(): ISolFileEntry { - this.tracking = ClientTrackingState.TRACKED; - - return this; - } - - public untrack(): ISolFileEntry { - this.tracking = ClientTrackingState.UNTRACKED; - - return this; - } - public isAnalyzed(): boolean { return this.status === SolFileState.ANALYZED; } diff --git a/server/src/parser/analyzer/analyzeSolFile.ts b/server/src/parser/analyzer/analyzeSolFile.ts index f97c28ed..04458cd8 100644 --- a/server/src/parser/analyzer/analyzeSolFile.ts +++ b/server/src/parser/analyzer/analyzeSolFile.ts @@ -8,11 +8,11 @@ import { SolFileState, } from "@common/types"; -export function analyzeSolFile( +export async function analyzeSolFile( { solFileIndex }: { solFileIndex: SolFileIndexMap }, solFileEntry: ISolFileEntry, newText?: string -): Node | undefined { +): Promise { try { solFileEntry.orphanNodes = []; @@ -42,14 +42,16 @@ export function analyzeSolFile( } solFileEntry.status = SolFileState.ANALYZED; - solFileEntry.analyzerTree.tree = matcher - .find( - solFileEntry.ast, - solFileEntry.uri, - solFileEntry.project.basePath, - solFileIndex - ) - .accept(matcher.find, solFileEntry.orphanNodes); + const node = await matcher.find( + solFileEntry.ast, + solFileEntry.uri, + solFileEntry.project.basePath, + solFileIndex + ); + solFileEntry.analyzerTree.tree = await node.accept( + matcher.find, + solFileEntry.orphanNodes + ); return solFileEntry.analyzerTree.tree; } catch { diff --git a/server/src/parser/analyzer/matcher.ts b/server/src/parser/analyzer/matcher.ts index cedd2756..0bffe7fa 100644 --- a/server/src/parser/analyzer/matcher.ts +++ b/server/src/parser/analyzer/matcher.ts @@ -77,6 +77,8 @@ import { RevertStatementNode } from "@analyzer/nodes/RevertStatementNode"; import { TypeDefinitionNode } from "@analyzer/nodes/TypeDefinitionNode"; import { Node, SolFileIndexMap } from "@common/types"; +import { realpathSync } from "fs"; +import { toUnixStyle } from "../../utils"; type ASTTypes = astTypes.ASTNode["type"]; type ASTMap = { [K in ASTTypes]: U extends { type: K } ? U : never }; @@ -109,28 +111,48 @@ function matcher( ); } -export const find = matcher({ - SourceUnit: ( +export const find = matcher>({ + SourceUnit: async ( sourceUnit: astTypes.SourceUnit, uri: string, rootPath: string, documentsAnalyzer: SolFileIndexMap ) => new SourceUnitNode(sourceUnit, uri, rootPath, documentsAnalyzer), - PragmaDirective: ( + PragmaDirective: async ( pragmaDirective: astTypes.PragmaDirective, uri: string, rootPath: string, documentsAnalyzer: SolFileIndexMap ) => new PragmaDirectiveNode(pragmaDirective, uri, rootPath, documentsAnalyzer), - ImportDirective: ( + ImportDirective: async ( importDirective: astTypes.ImportDirective, uri: string, rootPath: string, - documentsAnalyzer: SolFileIndexMap - ) => - new ImportDirectiveNode(importDirective, uri, rootPath, documentsAnalyzer), - ContractDefinition: ( + solFileIndex: SolFileIndexMap + ) => { + const solFileEntry = solFileIndex[uri]; + const realUri = toUnixStyle(realpathSync(uri)); + let resolvedUri = ""; + try { + resolvedUri = + (await solFileEntry.project.resolveImportPath( + realUri, + importDirective.path + )) ?? ""; + } catch (err) { + // + } + + return new ImportDirectiveNode( + importDirective, + uri, + rootPath, + solFileIndex, + resolvedUri + ); + }, + ContractDefinition: async ( contractDefinition: astTypes.ContractDefinition, uri: string, rootPath: string, @@ -142,7 +164,7 @@ export const find = matcher({ rootPath, documentsAnalyzer ), - InheritanceSpecifier: ( + InheritanceSpecifier: async ( inheritanceSpecifier: astTypes.InheritanceSpecifier, uri: string, rootPath: string, @@ -154,7 +176,7 @@ export const find = matcher({ rootPath, documentsAnalyzer ), - StateVariableDeclaration: ( + StateVariableDeclaration: async ( stateVariableDeclaration: astTypes.StateVariableDeclaration, uri: string, rootPath: string, @@ -166,7 +188,7 @@ export const find = matcher({ rootPath, documentsAnalyzer ), - UsingForDeclaration: ( + UsingForDeclaration: async ( usingForDeclaration: astTypes.UsingForDeclaration, uri: string, rootPath: string, @@ -178,7 +200,7 @@ export const find = matcher({ rootPath, documentsAnalyzer ), - StructDefinition: ( + StructDefinition: async ( structDefinition: astTypes.StructDefinition, uri: string, rootPath: string, @@ -190,7 +212,7 @@ export const find = matcher({ rootPath, documentsAnalyzer ), - ModifierDefinition: ( + ModifierDefinition: async ( modifierDefinition: astTypes.ModifierDefinition, uri: string, rootPath: string, @@ -202,7 +224,7 @@ export const find = matcher({ rootPath, documentsAnalyzer ), - ModifierInvocation: ( + ModifierInvocation: async ( modifierInvocation: astTypes.ModifierInvocation, uri: string, rootPath: string, @@ -214,7 +236,7 @@ export const find = matcher({ rootPath, documentsAnalyzer ), - FunctionDefinition: ( + FunctionDefinition: async ( functionDefinition: astTypes.FunctionDefinition, uri: string, rootPath: string, @@ -226,26 +248,26 @@ export const find = matcher({ rootPath, documentsAnalyzer ), - EventDefinition: ( + EventDefinition: async ( eventDefinition: astTypes.EventDefinition, uri: string, rootPath: string, documentsAnalyzer: SolFileIndexMap ) => new EventDefinitionNode(eventDefinition, uri, rootPath, documentsAnalyzer), - EnumValue: ( + EnumValue: async ( enumValue: astTypes.EnumValue, uri: string, rootPath: string, documentsAnalyzer: SolFileIndexMap ) => new EnumValueNode(enumValue, uri, rootPath, documentsAnalyzer), - EnumDefinition: ( + EnumDefinition: async ( enumDefinition: astTypes.EnumDefinition, uri: string, rootPath: string, documentsAnalyzer: SolFileIndexMap ) => new EnumDefinitionNode(enumDefinition, uri, rootPath, documentsAnalyzer), - VariableDeclaration: ( + VariableDeclaration: async ( variableDeclaration: astTypes.VariableDeclaration, uri: string, rootPath: string, @@ -257,7 +279,7 @@ export const find = matcher({ rootPath, documentsAnalyzer ), - UserDefinedTypeName: ( + UserDefinedTypeName: async ( userDefinedTypeName: astTypes.UserDefinedTypeName, uri: string, rootPath: string, @@ -269,19 +291,19 @@ export const find = matcher({ rootPath, documentsAnalyzer ), - ArrayTypeName: ( + ArrayTypeName: async ( arrayTypeName: astTypes.ArrayTypeName, uri: string, rootPath: string, documentsAnalyzer: SolFileIndexMap ) => new ArrayTypeNameNode(arrayTypeName, uri, rootPath, documentsAnalyzer), - Mapping: ( + Mapping: async ( mapping: astTypes.Mapping, uri: string, rootPath: string, documentsAnalyzer: SolFileIndexMap ) => new MappingNode(mapping, uri, rootPath, documentsAnalyzer), - ElementaryTypeName: ( + ElementaryTypeName: async ( elementaryTypeName: astTypes.ElementaryTypeName, uri: string, rootPath: string, @@ -293,7 +315,7 @@ export const find = matcher({ rootPath, documentsAnalyzer ), - FunctionTypeName: ( + FunctionTypeName: async ( functionTypeName: astTypes.FunctionTypeName, uri: string, rootPath: string, @@ -305,13 +327,13 @@ export const find = matcher({ rootPath, documentsAnalyzer ), - Block: ( + Block: async ( block: astTypes.Block, uri: string, rootPath: string, documentsAnalyzer: SolFileIndexMap ) => new BlockNode(block, uri, rootPath, documentsAnalyzer), - ExpressionStatement: ( + ExpressionStatement: async ( expressionStatement: astTypes.ExpressionStatement, uri: string, rootPath: string, @@ -323,13 +345,13 @@ export const find = matcher({ rootPath, documentsAnalyzer ), - IfStatement: ( + IfStatement: async ( ifStatement: astTypes.IfStatement, uri: string, rootPath: string, documentsAnalyzer: SolFileIndexMap ) => new IfStatementNode(ifStatement, uri, rootPath, documentsAnalyzer), - UncheckedStatement: ( + UncheckedStatement: async ( uncheckedStatement: astTypes.UncheckedStatement, uri: string, rootPath: string, @@ -341,19 +363,19 @@ export const find = matcher({ rootPath, documentsAnalyzer ), - WhileStatement: ( + WhileStatement: async ( whileStatement: astTypes.WhileStatement, uri: string, rootPath: string, documentsAnalyzer: SolFileIndexMap ) => new WhileStatementNode(whileStatement, uri, rootPath, documentsAnalyzer), - ForStatement: ( + ForStatement: async ( forStatement: astTypes.ForStatement, uri: string, rootPath: string, documentsAnalyzer: SolFileIndexMap ) => new ForStatementNode(forStatement, uri, rootPath, documentsAnalyzer), - InlineAssemblyStatement: ( + InlineAssemblyStatement: async ( inlineAssemblyStatement: astTypes.InlineAssemblyStatement, uri: string, rootPath: string, @@ -365,7 +387,7 @@ export const find = matcher({ rootPath, documentsAnalyzer ), - DoWhileStatement: ( + DoWhileStatement: async ( doWhileStatement: astTypes.DoWhileStatement, uri: string, rootPath: string, @@ -377,7 +399,7 @@ export const find = matcher({ rootPath, documentsAnalyzer ), - ContinueStatement: ( + ContinueStatement: async ( continueStatement: astTypes.ContinueStatement, uri: string, rootPath: string, @@ -389,44 +411,44 @@ export const find = matcher({ rootPath, documentsAnalyzer ), - Break: ( + Break: async ( astBreak: astTypes.Break, uri: string, rootPath: string, documentsAnalyzer: SolFileIndexMap ) => new BreakNode(astBreak, uri, rootPath, documentsAnalyzer), - Continue: ( + Continue: async ( astContinue: astTypes.Continue, uri: string, rootPath: string, documentsAnalyzer: SolFileIndexMap ) => new ContinueNode(astContinue, uri, rootPath, documentsAnalyzer), - BreakStatement: ( + BreakStatement: async ( breakStatement: astTypes.BreakStatement, uri: string, rootPath: string, documentsAnalyzer: SolFileIndexMap ) => new BreakStatementNode(breakStatement, uri, rootPath, documentsAnalyzer), - ReturnStatement: ( + ReturnStatement: async ( returnStatement: astTypes.ReturnStatement, uri: string, rootPath: string, documentsAnalyzer: SolFileIndexMap ) => new ReturnStatementNode(returnStatement, uri, rootPath, documentsAnalyzer), - EmitStatement: ( + EmitStatement: async ( emitStatement: astTypes.EmitStatement, uri: string, rootPath: string, documentsAnalyzer: SolFileIndexMap ) => new EmitStatementNode(emitStatement, uri, rootPath, documentsAnalyzer), - ThrowStatement: ( + ThrowStatement: async ( throwStatement: astTypes.ThrowStatement, uri: string, rootPath: string, documentsAnalyzer: SolFileIndexMap ) => new ThrowStatementNode(throwStatement, uri, rootPath, documentsAnalyzer), - VariableDeclarationStatement: ( + VariableDeclarationStatement: async ( variableDeclarationStatement: astTypes.VariableDeclarationStatement, uri: string, rootPath: string, @@ -438,25 +460,25 @@ export const find = matcher({ rootPath, documentsAnalyzer ), - FunctionCall: ( + FunctionCall: async ( functionCall: astTypes.FunctionCall, uri: string, rootPath: string, documentsAnalyzer: SolFileIndexMap ) => new FunctionCallNode(functionCall, uri, rootPath, documentsAnalyzer), - AssemblyBlock: ( + AssemblyBlock: async ( assemblyBlock: astTypes.AssemblyBlock, uri: string, rootPath: string, documentsAnalyzer: SolFileIndexMap ) => new AssemblyBlockNode(assemblyBlock, uri, rootPath, documentsAnalyzer), - AssemblyCall: ( + AssemblyCall: async ( assemblyCall: astTypes.AssemblyCall, uri: string, rootPath: string, documentsAnalyzer: SolFileIndexMap ) => new AssemblyCallNode(assemblyCall, uri, rootPath, documentsAnalyzer), - AssemblyLocalDefinition: ( + AssemblyLocalDefinition: async ( assemblyLocalDefinition: astTypes.AssemblyLocalDefinition, uri: string, rootPath: string, @@ -468,7 +490,7 @@ export const find = matcher({ rootPath, documentsAnalyzer ), - AssemblyAssignment: ( + AssemblyAssignment: async ( assemblyAssignment: astTypes.AssemblyAssignment, uri: string, rootPath: string, @@ -480,7 +502,7 @@ export const find = matcher({ rootPath, documentsAnalyzer ), - AssemblyStackAssignment: ( + AssemblyStackAssignment: async ( assemblyStackAssignment: astTypes.AssemblyStackAssignment, uri: string, rootPath: string, @@ -492,26 +514,26 @@ export const find = matcher({ rootPath, documentsAnalyzer ), - LabelDefinition: ( + LabelDefinition: async ( labelDefinition: astTypes.LabelDefinition, uri: string, rootPath: string, documentsAnalyzer: SolFileIndexMap ) => new LabelDefinitionNode(labelDefinition, uri, rootPath, documentsAnalyzer), - AssemblySwitch: ( + AssemblySwitch: async ( assemblySwitch: astTypes.AssemblySwitch, uri: string, rootPath: string, documentsAnalyzer: SolFileIndexMap ) => new AssemblySwitchNode(assemblySwitch, uri, rootPath, documentsAnalyzer), - AssemblyCase: ( + AssemblyCase: async ( assemblyCase: astTypes.AssemblyCase, uri: string, rootPath: string, documentsAnalyzer: SolFileIndexMap ) => new AssemblyCaseNode(assemblyCase, uri, rootPath, documentsAnalyzer), - AssemblyFunctionDefinition: ( + AssemblyFunctionDefinition: async ( assemblyFunctionDefinition: astTypes.AssemblyFunctionDefinition, uri: string, rootPath: string, @@ -523,7 +545,7 @@ export const find = matcher({ rootPath, documentsAnalyzer ), - AssemblyFunctionReturns: ( + AssemblyFunctionReturns: async ( assemblyFunctionReturns: astTypes.AssemblyFunctionReturns, uri: string, rootPath: string, @@ -535,38 +557,38 @@ export const find = matcher({ rootPath, documentsAnalyzer ), - AssemblyFor: ( + AssemblyFor: async ( assemblyFor: astTypes.AssemblyFor, uri: string, rootPath: string, documentsAnalyzer: SolFileIndexMap ) => new AssemblyForNode(assemblyFor, uri, rootPath, documentsAnalyzer), - AssemblyIf: ( + AssemblyIf: async ( assemblyIf: astTypes.AssemblyIf, uri: string, rootPath: string, documentsAnalyzer: SolFileIndexMap ) => new AssemblyIfNode(assemblyIf, uri, rootPath, documentsAnalyzer), - SubAssembly: ( + SubAssembly: async ( subAssembly: astTypes.SubAssembly, uri: string, rootPath: string, documentsAnalyzer: SolFileIndexMap ) => new SubAssemblyNode(subAssembly, uri, rootPath, documentsAnalyzer), - NewExpression: ( + NewExpression: async ( newExpression: astTypes.NewExpression, uri: string, rootPath: string, documentsAnalyzer: SolFileIndexMap ) => new NewExpressionNode(newExpression, uri, rootPath, documentsAnalyzer), - TupleExpression: ( + TupleExpression: async ( tupleExpression: astTypes.TupleExpression, uri: string, rootPath: string, documentsAnalyzer: SolFileIndexMap ) => new TupleExpressionNode(tupleExpression, uri, rootPath, documentsAnalyzer), - TypeNameExpression: ( + TypeNameExpression: async ( typeNameExpression: astTypes.TypeNameExpression, uri: string, rootPath: string, @@ -578,7 +600,7 @@ export const find = matcher({ rootPath, documentsAnalyzer ), - NameValueExpression: ( + NameValueExpression: async ( nameValueExpression: astTypes.NameValueExpression, uri: string, rootPath: string, @@ -590,62 +612,62 @@ export const find = matcher({ rootPath, documentsAnalyzer ), - NumberLiteral: ( + NumberLiteral: async ( numberLiteral: astTypes.NumberLiteral, uri: string, rootPath: string, documentsAnalyzer: SolFileIndexMap ) => new NumberLiteralNode(numberLiteral, uri, rootPath, documentsAnalyzer), - BooleanLiteral: ( + BooleanLiteral: async ( booleanLiteral: astTypes.BooleanLiteral, uri: string, rootPath: string, documentsAnalyzer: SolFileIndexMap ) => new BooleanLiteralNode(booleanLiteral, uri, rootPath, documentsAnalyzer), - HexLiteral: ( + HexLiteral: async ( hexLiteral: astTypes.HexLiteral, uri: string, rootPath: string, documentsAnalyzer: SolFileIndexMap ) => new HexLiteralNode(hexLiteral, uri, rootPath, documentsAnalyzer), - StringLiteral: ( + StringLiteral: async ( stringLiteral: astTypes.StringLiteral, uri: string, rootPath: string, documentsAnalyzer: SolFileIndexMap ) => new StringLiteralNode(stringLiteral, uri, rootPath, documentsAnalyzer), - Identifier: ( + Identifier: async ( identifier: astTypes.Identifier, uri: string, rootPath: string, documentsAnalyzer: SolFileIndexMap ) => new IdentifierNode(identifier, uri, rootPath, documentsAnalyzer), - BinaryOperation: ( + BinaryOperation: async ( binaryOperation: astTypes.BinaryOperation, uri: string, rootPath: string, documentsAnalyzer: SolFileIndexMap ) => new BinaryOperationNode(binaryOperation, uri, rootPath, documentsAnalyzer), - UnaryOperation: ( + UnaryOperation: async ( unaryOperation: astTypes.UnaryOperation, uri: string, rootPath: string, documentsAnalyzer: SolFileIndexMap ) => new UnaryOperationNode(unaryOperation, uri, rootPath, documentsAnalyzer), - Conditional: ( + Conditional: async ( conditional: astTypes.Conditional, uri: string, rootPath: string, documentsAnalyzer: SolFileIndexMap ) => new ConditionalNode(conditional, uri, rootPath, documentsAnalyzer), - IndexAccess: ( + IndexAccess: async ( indexAccess: astTypes.IndexAccess, uri: string, rootPath: string, documentsAnalyzer: SolFileIndexMap ) => new IndexAccessNode(indexAccess, uri, rootPath, documentsAnalyzer), - IndexRangeAccess: ( + IndexRangeAccess: async ( indexRangeAccess: astTypes.IndexRangeAccess, uri: string, rootPath: string, @@ -657,37 +679,37 @@ export const find = matcher({ rootPath, documentsAnalyzer ), - MemberAccess: ( + MemberAccess: async ( memberAccess: astTypes.MemberAccess, uri: string, rootPath: string, documentsAnalyzer: SolFileIndexMap ) => new MemberAccessNode(memberAccess, uri, rootPath, documentsAnalyzer), - HexNumber: ( + HexNumber: async ( hexNumber: astTypes.HexNumber, uri: string, rootPath: string, documentsAnalyzer: SolFileIndexMap ) => new HexNumberNode(hexNumber, uri, rootPath, documentsAnalyzer), - DecimalNumber: ( + DecimalNumber: async ( decimalNumber: astTypes.DecimalNumber, uri: string, rootPath: string, documentsAnalyzer: SolFileIndexMap ) => new DecimalNumberNode(decimalNumber, uri, rootPath, documentsAnalyzer), - TryStatement: ( + TryStatement: async ( tryStatement: astTypes.TryStatement, uri: string, rootPath: string, documentsAnalyzer: SolFileIndexMap ) => new TryStatementNode(tryStatement, uri, rootPath, documentsAnalyzer), - NameValueList: ( + NameValueList: async ( nameValueList: astTypes.NameValueList, uri: string, rootPath: string, documentsAnalyzer: SolFileIndexMap ) => new NameValueListNode(nameValueList, uri, rootPath, documentsAnalyzer), - AssemblyMemberAccess: ( + AssemblyMemberAccess: async ( assemblyMemberAccess: astTypes.AssemblyMemberAccess, uri: string, rootPath: string, @@ -699,13 +721,13 @@ export const find = matcher({ rootPath, documentsAnalyzer ), - CatchClause: ( + CatchClause: async ( catchClause: astTypes.CatchClause, uri: string, rootPath: string, documentsAnalyzer: SolFileIndexMap ) => new CatchClauseNode(catchClause, uri, rootPath, documentsAnalyzer), - FileLevelConstant: ( + FileLevelConstant: async ( fileLevelConstant: astTypes.FileLevelConstant, uri: string, rootPath: string, @@ -717,7 +739,7 @@ export const find = matcher({ rootPath, documentsAnalyzer ), - CustomErrorDefinition: ( + CustomErrorDefinition: async ( customErrorDefinition: astTypes.CustomErrorDefinition, uri: string, rootPath: string, @@ -729,14 +751,14 @@ export const find = matcher({ rootPath, documentsAnalyzer ), - RevertStatement: ( + RevertStatement: async ( revertStatement: astTypes.RevertStatement, uri: string, rootPath: string, documentsAnalyzer: SolFileIndexMap ) => new RevertStatementNode(revertStatement, uri, rootPath, documentsAnalyzer), - TypeDefinition: ( + TypeDefinition: async ( typeDefinition: astTypes.TypeDefinition, uri: string, rootPath: string, diff --git a/server/src/parser/analyzer/nodes/ArrayTypeNameNode.ts b/server/src/parser/analyzer/nodes/ArrayTypeNameNode.ts index 01ce7b11..f6748ef7 100644 --- a/server/src/parser/analyzer/nodes/ArrayTypeNameNode.ts +++ b/server/src/parser/analyzer/nodes/ArrayTypeNameNode.ts @@ -18,20 +18,26 @@ export class ArrayTypeNameNode extends Node { this.astNode = arrayTypeName; } - public accept( + public async accept( find: FinderType, orphanNodes: Node[], parent?: Node, expression?: Node - ): Node { + ): Promise { this.setExpressionNode(expression); - const typeNode = find( + const foundTypeNode = await find( this.astNode.baseTypeName, this.uri, this.rootPath, this.solFileIndex - ).accept(find, orphanNodes, parent, this); + ); + const typeNode = await foundTypeNode.accept( + find, + orphanNodes, + parent, + this + ); // eslint-disable-next-line @typescript-eslint/strict-boolean-expressions if (typeNode) { diff --git a/server/src/parser/analyzer/nodes/AssemblyAssignmentNode.ts b/server/src/parser/analyzer/nodes/AssemblyAssignmentNode.ts index e0a96b29..4cff4003 100644 --- a/server/src/parser/analyzer/nodes/AssemblyAssignmentNode.ts +++ b/server/src/parser/analyzer/nodes/AssemblyAssignmentNode.ts @@ -18,30 +18,33 @@ export class AssemblyAssignmentNode extends Node { this.astNode = assemblyAssignment; } - public accept( + public async accept( find: FinderType, orphanNodes: Node[], parent?: Node, expression?: Node - ): Node { + ): Promise { this.setExpressionNode(expression); for (const name of this.astNode.names ?? []) { - find(name, this.uri, this.rootPath, this.solFileIndex).accept( - find, - orphanNodes, - parent + const foundNode = await find( + name, + this.uri, + this.rootPath, + this.solFileIndex ); + await foundNode.accept(find, orphanNodes, parent); } // eslint-disable-next-line @typescript-eslint/strict-boolean-expressions if (this.astNode.expression) { - find( + const foundNode = await find( this.astNode.expression, this.uri, this.rootPath, this.solFileIndex - ).accept(find, orphanNodes, parent); + ); + await foundNode.accept(find, orphanNodes, parent); } return this; diff --git a/server/src/parser/analyzer/nodes/AssemblyBlockNode.ts b/server/src/parser/analyzer/nodes/AssemblyBlockNode.ts index 08d02869..7ece030a 100644 --- a/server/src/parser/analyzer/nodes/AssemblyBlockNode.ts +++ b/server/src/parser/analyzer/nodes/AssemblyBlockNode.ts @@ -18,12 +18,12 @@ export class AssemblyBlockNode extends Node { this.astNode = assemblyBlock; } - public accept( + public async accept( find: FinderType, orphanNodes: Node[], parent?: Node, expression?: Node - ): Node { + ): Promise { this.setExpressionNode(expression); if (parent) { @@ -31,11 +31,13 @@ export class AssemblyBlockNode extends Node { } for (const operation of this.astNode.operations ?? []) { - find(operation, this.uri, this.rootPath, this.solFileIndex).accept( - find, - orphanNodes, - this + const foundNode = await find( + operation, + this.uri, + this.rootPath, + this.solFileIndex ); + await foundNode.accept(find, orphanNodes, this); } parent?.addChild(this); diff --git a/server/src/parser/analyzer/nodes/AssemblyCallNode.ts b/server/src/parser/analyzer/nodes/AssemblyCallNode.ts index b5c76427..f9096ccf 100644 --- a/server/src/parser/analyzer/nodes/AssemblyCallNode.ts +++ b/server/src/parser/analyzer/nodes/AssemblyCallNode.ts @@ -28,20 +28,22 @@ export class AssemblyCallNode extends Node { this.astNode = assemblyCall; } - public accept( + public async accept( find: FinderType, orphanNodes: Node[], parent?: Node, expression?: Node - ): Node { + ): Promise { this.setExpressionNode(expression); for (const argument of this.astNode.arguments ?? []) { - find(argument, this.uri, this.rootPath, this.solFileIndex).accept( - find, - orphanNodes, - parent + const foundNode = await find( + argument, + this.uri, + this.rootPath, + this.solFileIndex ); + await foundNode.accept(find, orphanNodes, parent); } if (parent) { diff --git a/server/src/parser/analyzer/nodes/AssemblyCaseNode.ts b/server/src/parser/analyzer/nodes/AssemblyCaseNode.ts index 28c78fea..00062578 100644 --- a/server/src/parser/analyzer/nodes/AssemblyCaseNode.ts +++ b/server/src/parser/analyzer/nodes/AssemblyCaseNode.ts @@ -13,12 +13,12 @@ export class AssemblyCaseNode extends Node { this.astNode = assemblyCase; } - public accept( + public async accept( find: FinderType, orphanNodes: Node[], parent?: Node, expression?: Node - ): Node { + ): Promise { this.setExpressionNode(expression); if (parent) { @@ -26,19 +26,18 @@ export class AssemblyCaseNode extends Node { } if (this.astNode.value) { - find( + const foundNode = await find( this.astNode.value, this.uri, this.rootPath, this.solFileIndex - ).accept(find, orphanNodes, this); + ); + await foundNode.accept(find, orphanNodes, this); } - find(this.astNode.block, this.uri, this.rootPath, this.solFileIndex).accept( - find, - orphanNodes, - this - ); + await ( + await find(this.astNode.block, this.uri, this.rootPath, this.solFileIndex) + ).accept(find, orphanNodes, this); parent?.addChild(this); diff --git a/server/src/parser/analyzer/nodes/AssemblyForNode.ts b/server/src/parser/analyzer/nodes/AssemblyForNode.ts index b448959f..ac35de19 100644 --- a/server/src/parser/analyzer/nodes/AssemblyForNode.ts +++ b/server/src/parser/analyzer/nodes/AssemblyForNode.ts @@ -13,39 +13,36 @@ export class AssemblyForNode extends Node { this.astNode = assemblyFor; } - public accept( + public async accept( find: FinderType, orphanNodes: Node[], parent?: Node, expression?: Node - ): Node { + ): Promise { this.setExpressionNode(expression); if (parent) { this.setParent(parent); } - find(this.astNode.pre, this.uri, this.rootPath, this.solFileIndex).accept( - find, - orphanNodes, - this - ); - find( - this.astNode.condition, - this.uri, - this.rootPath, - this.solFileIndex + await ( + await find(this.astNode.pre, this.uri, this.rootPath, this.solFileIndex) + ).accept(find, orphanNodes, this); + + await ( + await find( + this.astNode.condition, + this.uri, + this.rootPath, + this.solFileIndex + ) + ).accept(find, orphanNodes, this); + await ( + await find(this.astNode.post, this.uri, this.rootPath, this.solFileIndex) + ).accept(find, orphanNodes, this); + await ( + await find(this.astNode.body, this.uri, this.rootPath, this.solFileIndex) ).accept(find, orphanNodes, this); - find(this.astNode.post, this.uri, this.rootPath, this.solFileIndex).accept( - find, - orphanNodes, - this - ); - find(this.astNode.body, this.uri, this.rootPath, this.solFileIndex).accept( - find, - orphanNodes, - this - ); parent?.addChild(this); diff --git a/server/src/parser/analyzer/nodes/AssemblyFunctionDefinitionNode.ts b/server/src/parser/analyzer/nodes/AssemblyFunctionDefinitionNode.ts index 06e3b132..df934dc0 100644 --- a/server/src/parser/analyzer/nodes/AssemblyFunctionDefinitionNode.ts +++ b/server/src/parser/analyzer/nodes/AssemblyFunctionDefinitionNode.ts @@ -49,12 +49,12 @@ export class AssemblyFunctionDefinitionNode extends Node { return this; } - public accept( + public async accept( find: FinderType, orphanNodes: Node[], parent?: Node, expression?: Node - ): Node { + ): Promise { this.setExpressionNode(expression); if (parent) { @@ -64,29 +64,22 @@ export class AssemblyFunctionDefinitionNode extends Node { this._findChildren(orphanNodes); for (const argument of this.astNode.arguments) { - find(argument, this.uri, this.rootPath, this.solFileIndex).accept( - find, - orphanNodes, - this - ); + await ( + await find(argument, this.uri, this.rootPath, this.solFileIndex) + ).accept(find, orphanNodes, this); } for (const returnArgument of this.astNode.returnArguments) { - const typeNode = find( - returnArgument, - this.uri, - this.rootPath, - this.solFileIndex + const typeNode = await ( + await find(returnArgument, this.uri, this.rootPath, this.solFileIndex) ).accept(find, orphanNodes, this); this.addTypeNode(typeNode); } - find(this.astNode.body, this.uri, this.rootPath, this.solFileIndex).accept( - find, - orphanNodes, - this - ); + await ( + await find(this.astNode.body, this.uri, this.rootPath, this.solFileIndex) + ).accept(find, orphanNodes, this); parent?.addChild(this); diff --git a/server/src/parser/analyzer/nodes/AssemblyFunctionReturnsNode.ts b/server/src/parser/analyzer/nodes/AssemblyFunctionReturnsNode.ts index 9bfddb23..b648d4f8 100644 --- a/server/src/parser/analyzer/nodes/AssemblyFunctionReturnsNode.ts +++ b/server/src/parser/analyzer/nodes/AssemblyFunctionReturnsNode.ts @@ -19,12 +19,12 @@ export class AssemblyFunctionReturnsNode extends Node { // TO-DO: Implement name location for rename } - public accept( + public async accept( find: FinderType, orphanNodes: Node[], parent?: Node, expression?: Node - ): Node { + ): Promise { this.setExpressionNode(expression); // TO-DO: Method not implemented return this; diff --git a/server/src/parser/analyzer/nodes/AssemblyIfNode.ts b/server/src/parser/analyzer/nodes/AssemblyIfNode.ts index fd98c6d4..c960d09b 100644 --- a/server/src/parser/analyzer/nodes/AssemblyIfNode.ts +++ b/server/src/parser/analyzer/nodes/AssemblyIfNode.ts @@ -13,30 +13,30 @@ export class AssemblyIfNode extends Node { this.astNode = assemblyIf; } - public accept( + public async accept( find: FinderType, orphanNodes: Node[], parent?: Node, expression?: Node - ): Node { + ): Promise { this.setExpressionNode(expression); if (parent) { this.setParent(parent); } - find( - this.astNode.condition, - this.uri, - this.rootPath, - this.solFileIndex + await ( + await find( + this.astNode.condition, + this.uri, + this.rootPath, + this.solFileIndex + ) ).accept(find, orphanNodes, this); - find(this.astNode.body, this.uri, this.rootPath, this.solFileIndex).accept( - find, - orphanNodes, - this - ); + await ( + await find(this.astNode.body, this.uri, this.rootPath, this.solFileIndex) + ).accept(find, orphanNodes, this); parent?.addChild(this); diff --git a/server/src/parser/analyzer/nodes/AssemblyLiteralNode.ts b/server/src/parser/analyzer/nodes/AssemblyLiteralNode.ts index 75b821a4..5d729cba 100644 --- a/server/src/parser/analyzer/nodes/AssemblyLiteralNode.ts +++ b/server/src/parser/analyzer/nodes/AssemblyLiteralNode.ts @@ -19,12 +19,12 @@ export class AssemblyLiteralNode extends Node { // TO-DO: Implement name location for rename } - public accept( + public async accept( find: FinderType, orphanNodes: Node[], parent?: Node, expression?: Node - ): Node { + ): Promise { this.setExpressionNode(expression); // TO-DO: Method not implemented return this; diff --git a/server/src/parser/analyzer/nodes/AssemblyLocalDefinitionNode.ts b/server/src/parser/analyzer/nodes/AssemblyLocalDefinitionNode.ts index 150e64bf..6cbe09ed 100644 --- a/server/src/parser/analyzer/nodes/AssemblyLocalDefinitionNode.ts +++ b/server/src/parser/analyzer/nodes/AssemblyLocalDefinitionNode.ts @@ -39,16 +39,16 @@ export class AssemblyLocalDefinitionNode extends Node { return this; } - public accept( + public async accept( find: FinderType, orphanNodes: Node[], parent?: Node, expression?: Node - ): Node { + ): Promise { this.setExpressionNode(expression); for (const name of this.astNode.names ?? []) { - const identifierNode = find( + const identifierNode = await find( name, this.uri, this.rootPath, @@ -66,11 +66,13 @@ export class AssemblyLocalDefinitionNode extends Node { } if (this.astNode.expression) { - find( - this.astNode.expression, - this.uri, - this.rootPath, - this.solFileIndex + await ( + await find( + this.astNode.expression, + this.uri, + this.rootPath, + this.solFileIndex + ) ).accept(find, orphanNodes, parent); } diff --git a/server/src/parser/analyzer/nodes/AssemblyMemberAccessNode.ts b/server/src/parser/analyzer/nodes/AssemblyMemberAccessNode.ts index 29450641..1d53f0e5 100644 --- a/server/src/parser/analyzer/nodes/AssemblyMemberAccessNode.ts +++ b/server/src/parser/analyzer/nodes/AssemblyMemberAccessNode.ts @@ -19,12 +19,12 @@ export class AssemblyMemberAccessNode extends Node { // TO-DO: Implement name location for rename } - public accept( + public async accept( find: FinderType, orphanNodes: Node[], parent?: Node, expression?: Node - ): Node { + ): Promise { this.setExpressionNode(expression); // TO-DO: Method not implemented return this; diff --git a/server/src/parser/analyzer/nodes/AssemblyStackAssignmentNode.ts b/server/src/parser/analyzer/nodes/AssemblyStackAssignmentNode.ts index cf4af824..6d17bb17 100644 --- a/server/src/parser/analyzer/nodes/AssemblyStackAssignmentNode.ts +++ b/server/src/parser/analyzer/nodes/AssemblyStackAssignmentNode.ts @@ -19,12 +19,12 @@ export class AssemblyStackAssignmentNode extends Node { // TO-DO: Implement name location for rename } - public accept( + public async accept( find: FinderType, orphanNodes: Node[], parent?: Node, expression?: Node - ): Node { + ): Promise { this.setExpressionNode(expression); // TO-DO: Method not implemented return this; diff --git a/server/src/parser/analyzer/nodes/AssemblySwitchNode.ts b/server/src/parser/analyzer/nodes/AssemblySwitchNode.ts index 152c338b..441c2b89 100644 --- a/server/src/parser/analyzer/nodes/AssemblySwitchNode.ts +++ b/server/src/parser/analyzer/nodes/AssemblySwitchNode.ts @@ -18,31 +18,31 @@ export class AssemblySwitchNode extends Node { this.astNode = assemblySwitch; } - public accept( + public async accept( find: FinderType, orphanNodes: Node[], parent?: Node, expression?: Node - ): Node { + ): Promise { this.setExpressionNode(expression); if (parent) { this.setParent(parent); } - find( - this.astNode.expression, - this.uri, - this.rootPath, - this.solFileIndex + await ( + await find( + this.astNode.expression, + this.uri, + this.rootPath, + this.solFileIndex + ) ).accept(find, orphanNodes, this); for (const caseNode of this.astNode.cases) { - find(caseNode, this.uri, this.rootPath, this.solFileIndex).accept( - find, - orphanNodes, - this - ); + await ( + await find(caseNode, this.uri, this.rootPath, this.solFileIndex) + ).accept(find, orphanNodes, this); } parent?.addChild(this); diff --git a/server/src/parser/analyzer/nodes/BinaryOperationNode.ts b/server/src/parser/analyzer/nodes/BinaryOperationNode.ts index 57b080e6..13045b6d 100644 --- a/server/src/parser/analyzer/nodes/BinaryOperationNode.ts +++ b/server/src/parser/analyzer/nodes/BinaryOperationNode.ts @@ -18,24 +18,20 @@ export class BinaryOperationNode extends Node { this.astNode = binaryOperation; } - public accept( + public async accept( find: FinderType, orphanNodes: Node[], parent?: Node, expression?: Node - ): Node { + ): Promise { this.setExpressionNode(expression); - find(this.astNode.left, this.uri, this.rootPath, this.solFileIndex).accept( - find, - orphanNodes, - parent - ); - find(this.astNode.right, this.uri, this.rootPath, this.solFileIndex).accept( - find, - orphanNodes, - parent - ); + await ( + await find(this.astNode.left, this.uri, this.rootPath, this.solFileIndex) + ).accept(find, orphanNodes, parent); + await ( + await find(this.astNode.right, this.uri, this.rootPath, this.solFileIndex) + ).accept(find, orphanNodes, parent); return this; } diff --git a/server/src/parser/analyzer/nodes/BlockNode.ts b/server/src/parser/analyzer/nodes/BlockNode.ts index f70f050c..2f9f2144 100644 --- a/server/src/parser/analyzer/nodes/BlockNode.ts +++ b/server/src/parser/analyzer/nodes/BlockNode.ts @@ -17,20 +17,18 @@ export class BlockNode extends Node { return undefined; } - public accept( + public async accept( find: FinderType, orphanNodes: Node[], parent?: Node, expression?: Node - ): Node { + ): Promise { this.setExpressionNode(expression); for (const statement of this.astNode.statements) { - find(statement, this.uri, this.rootPath, this.solFileIndex).accept( - find, - orphanNodes, - parent - ); + await ( + await find(statement, this.uri, this.rootPath, this.solFileIndex) + ).accept(find, orphanNodes, parent); } return this; diff --git a/server/src/parser/analyzer/nodes/BooleanLiteralNode.ts b/server/src/parser/analyzer/nodes/BooleanLiteralNode.ts index 86aeb322..c3db1d3e 100644 --- a/server/src/parser/analyzer/nodes/BooleanLiteralNode.ts +++ b/server/src/parser/analyzer/nodes/BooleanLiteralNode.ts @@ -19,12 +19,12 @@ export class BooleanLiteralNode extends Node { // TO-DO: Implement name location for rename } - public accept( + public async accept( find: FinderType, orphanNodes: Node[], parent?: Node, expression?: Node - ): Node { + ): Promise { this.setExpressionNode(expression); // TO-DO: Method not implemented return this; diff --git a/server/src/parser/analyzer/nodes/BreakNode.ts b/server/src/parser/analyzer/nodes/BreakNode.ts index ff694eb2..99ad7c9b 100644 --- a/server/src/parser/analyzer/nodes/BreakNode.ts +++ b/server/src/parser/analyzer/nodes/BreakNode.ts @@ -14,12 +14,12 @@ export class BreakNode extends Node { // TO-DO: Implement name location for rename } - public accept( + public async accept( find: FinderType, orphanNodes: Node[], parent?: Node, expression?: Node - ): Node { + ): Promise { this.setExpressionNode(expression); // TO-DO: Method not implemented return this; diff --git a/server/src/parser/analyzer/nodes/BreakStatementNode.ts b/server/src/parser/analyzer/nodes/BreakStatementNode.ts index cacdd2ca..1544cb41 100644 --- a/server/src/parser/analyzer/nodes/BreakStatementNode.ts +++ b/server/src/parser/analyzer/nodes/BreakStatementNode.ts @@ -19,12 +19,12 @@ export class BreakStatementNode extends Node { // TO-DO: Implement name location for rename } - public accept( + public async accept( find: FinderType, orphanNodes: Node[], parent?: Node, expression?: Node - ): Node { + ): Promise { this.setExpressionNode(expression); // TO-DO: Method not implemented return this; diff --git a/server/src/parser/analyzer/nodes/CatchClauseNode.ts b/server/src/parser/analyzer/nodes/CatchClauseNode.ts index 6681ac01..a2fe9234 100644 --- a/server/src/parser/analyzer/nodes/CatchClauseNode.ts +++ b/server/src/parser/analyzer/nodes/CatchClauseNode.ts @@ -13,12 +13,12 @@ export class CatchClauseNode extends Node { this.astNode = catchClause; } - public accept( + public async accept( find: FinderType, orphanNodes: Node[], parent?: Node, expression?: Node - ): Node { + ): Promise { this.setExpressionNode(expression); if (parent) { @@ -26,18 +26,14 @@ export class CatchClauseNode extends Node { } for (const param of this.astNode.parameters || []) { - find(param, this.uri, this.rootPath, this.solFileIndex).accept( - find, - orphanNodes, - this - ); + await ( + await find(param, this.uri, this.rootPath, this.solFileIndex) + ).accept(find, orphanNodes, this); } - find(this.astNode.body, this.uri, this.rootPath, this.solFileIndex).accept( - find, - orphanNodes, - this - ); + await ( + await find(this.astNode.body, this.uri, this.rootPath, this.solFileIndex) + ).accept(find, orphanNodes, this); parent?.addChild(this); diff --git a/server/src/parser/analyzer/nodes/ConditionalNode.ts b/server/src/parser/analyzer/nodes/ConditionalNode.ts index 42ab47f2..7d0e5968 100644 --- a/server/src/parser/analyzer/nodes/ConditionalNode.ts +++ b/server/src/parser/analyzer/nodes/ConditionalNode.ts @@ -13,41 +13,47 @@ export class ConditionalNode extends Node { this.astNode = conditional; } - public accept( + public async accept( find: FinderType, orphanNodes: Node[], parent?: Node, expression?: Node - ): Node { + ): Promise { this.setExpressionNode(expression); // eslint-disable-next-line @typescript-eslint/strict-boolean-expressions if (this.astNode.condition) { - find( - this.astNode.condition, - this.uri, - this.rootPath, - this.solFileIndex + await ( + await find( + this.astNode.condition, + this.uri, + this.rootPath, + this.solFileIndex + ) ).accept(find, orphanNodes, parent); } // eslint-disable-next-line @typescript-eslint/strict-boolean-expressions if (this.astNode.trueExpression) { - find( - this.astNode.trueExpression, - this.uri, - this.rootPath, - this.solFileIndex + await ( + await find( + this.astNode.trueExpression, + this.uri, + this.rootPath, + this.solFileIndex + ) ).accept(find, orphanNodes, parent); } // eslint-disable-next-line @typescript-eslint/strict-boolean-expressions if (this.astNode.falseExpression) { - find( - this.astNode.falseExpression, - this.uri, - this.rootPath, - this.solFileIndex + await ( + await find( + this.astNode.falseExpression, + this.uri, + this.rootPath, + this.solFileIndex + ) ).accept(find, orphanNodes, parent); } diff --git a/server/src/parser/analyzer/nodes/ContinueNode.ts b/server/src/parser/analyzer/nodes/ContinueNode.ts index 0877ed37..dada4308 100644 --- a/server/src/parser/analyzer/nodes/ContinueNode.ts +++ b/server/src/parser/analyzer/nodes/ContinueNode.ts @@ -14,12 +14,12 @@ export class ContinueNode extends Node { // TO-DO: Implement name location for rename } - public accept( + public async accept( find: FinderType, orphanNodes: Node[], parent?: Node, expression?: Node - ): Node { + ): Promise { this.setExpressionNode(expression); // TO-DO: Method not implemented return this; diff --git a/server/src/parser/analyzer/nodes/ContinueStatementNode.ts b/server/src/parser/analyzer/nodes/ContinueStatementNode.ts index 150bf87c..10d5e0fe 100644 --- a/server/src/parser/analyzer/nodes/ContinueStatementNode.ts +++ b/server/src/parser/analyzer/nodes/ContinueStatementNode.ts @@ -19,12 +19,12 @@ export class ContinueStatementNode extends Node { // TO-DO: Implement name location for rename } - public accept( + public async accept( find: FinderType, orphanNodes: Node[], parent?: Node, expression?: Node - ): Node { + ): Promise { this.setExpressionNode(expression); // TO-DO: Method not implemented return this; diff --git a/server/src/parser/analyzer/nodes/ContractDefinitionNode.ts b/server/src/parser/analyzer/nodes/ContractDefinitionNode.ts index 136144d8..4f660bdb 100644 --- a/server/src/parser/analyzer/nodes/ContractDefinitionNode.ts +++ b/server/src/parser/analyzer/nodes/ContractDefinitionNode.ts @@ -68,12 +68,12 @@ export class ContractDefinitionNode extends AbstractContractDefinitionNode { return this; } - public accept( + public async accept( find: FinderType, orphanNodes: Node[], parent?: Node, expression?: Node - ): Node { + ): Promise { this.setExpressionNode(expression); const searcher = this.solFileIndex[this.uri]?.searcher; @@ -83,11 +83,8 @@ export class ContractDefinitionNode extends AbstractContractDefinitionNode { } for (const baseContract of this.astNode.baseContracts) { - const inheritanceNode = find( - baseContract, - this.uri, - this.rootPath, - this.solFileIndex + const inheritanceNode = await ( + await find(baseContract, this.uri, this.rootPath, this.solFileIndex) ).accept(find, orphanNodes, this); const inheritanceNodeDefinition = inheritanceNode.getDefinitionNode(); @@ -101,11 +98,9 @@ export class ContractDefinitionNode extends AbstractContractDefinitionNode { } for (const subNode of this.astNode.subNodes) { - find(subNode, this.uri, this.rootPath, this.solFileIndex).accept( - find, - orphanNodes, - this - ); + await ( + await find(subNode, this.uri, this.rootPath, this.solFileIndex) + ).accept(find, orphanNodes, this); } // Find parent for orphanNodes from this contract in inheritance Nodes diff --git a/server/src/parser/analyzer/nodes/CustomErrorDefinitionNode.ts b/server/src/parser/analyzer/nodes/CustomErrorDefinitionNode.ts index aacb8b05..deeee70d 100644 --- a/server/src/parser/analyzer/nodes/CustomErrorDefinitionNode.ts +++ b/server/src/parser/analyzer/nodes/CustomErrorDefinitionNode.ts @@ -53,12 +53,12 @@ export class CustomErrorDefinitionNode extends Node { return this; } - public accept( + public async accept( find: FinderType, orphanNodes: Node[], parent?: Node, expression?: Node - ): Node { + ): Promise { this.setExpressionNode(expression); const searcher = this.solFileIndex[this.uri]?.searcher; @@ -68,11 +68,9 @@ export class CustomErrorDefinitionNode extends Node { } for (const parameter of this.astNode.parameters) { - find(parameter, this.uri, this.rootPath, this.solFileIndex).accept( - find, - orphanNodes, - this - ); + await ( + await find(parameter, this.uri, this.rootPath, this.solFileIndex) + ).accept(find, orphanNodes, this); } const rootNode = findSourceUnitNode(parent); diff --git a/server/src/parser/analyzer/nodes/DecimalNumberNode.ts b/server/src/parser/analyzer/nodes/DecimalNumberNode.ts index fd674fb0..90b89999 100644 --- a/server/src/parser/analyzer/nodes/DecimalNumberNode.ts +++ b/server/src/parser/analyzer/nodes/DecimalNumberNode.ts @@ -19,12 +19,12 @@ export class DecimalNumberNode extends Node { // TO-DO: Implement name location for rename } - public accept( + public async accept( find: FinderType, orphanNodes: Node[], parent?: Node, expression?: Node - ): Node { + ): Promise { this.setExpressionNode(expression); // TO-DO: Method not implemented return this; diff --git a/server/src/parser/analyzer/nodes/DoWhileStatementNode.ts b/server/src/parser/analyzer/nodes/DoWhileStatementNode.ts index bb542f96..171c88ba 100644 --- a/server/src/parser/analyzer/nodes/DoWhileStatementNode.ts +++ b/server/src/parser/analyzer/nodes/DoWhileStatementNode.ts @@ -22,30 +22,30 @@ export class DoWhileStatementNode extends Node { return undefined; } - public accept( + public async accept( find: FinderType, orphanNodes: Node[], parent?: Node, expression?: Node - ): Node { + ): Promise { this.setExpressionNode(expression); if (parent) { this.setParent(parent); } - find( - this.astNode.condition, - this.uri, - this.rootPath, - this.solFileIndex + await ( + await find( + this.astNode.condition, + this.uri, + this.rootPath, + this.solFileIndex + ) ).accept(find, orphanNodes, this); - find(this.astNode.body, this.uri, this.rootPath, this.solFileIndex).accept( - find, - orphanNodes, - this - ); + await ( + await find(this.astNode.body, this.uri, this.rootPath, this.solFileIndex) + ).accept(find, orphanNodes, this); parent?.addChild(this); diff --git a/server/src/parser/analyzer/nodes/ElementaryTypeNameNode.ts b/server/src/parser/analyzer/nodes/ElementaryTypeNameNode.ts index 383c310d..75dc2b12 100644 --- a/server/src/parser/analyzer/nodes/ElementaryTypeNameNode.ts +++ b/server/src/parser/analyzer/nodes/ElementaryTypeNameNode.ts @@ -25,12 +25,12 @@ export class ElementaryTypeNameNode extends Node { // TO-DO: Implement name location for rename } - public accept( + public async accept( find: FinderType, orphanNodes: Node[], parent?: Node, expression?: Node - ): Node { + ): Promise { this.setExpressionNode(expression); // TO-DO: Method not implemented return this; diff --git a/server/src/parser/analyzer/nodes/EmitStatementNode.ts b/server/src/parser/analyzer/nodes/EmitStatementNode.ts index 0ade7f5c..8c883744 100644 --- a/server/src/parser/analyzer/nodes/EmitStatementNode.ts +++ b/server/src/parser/analyzer/nodes/EmitStatementNode.ts @@ -18,19 +18,21 @@ export class EmitStatementNode extends Node { this.astNode = emitStatement; } - public accept( + public async accept( find: FinderType, orphanNodes: Node[], parent?: Node, expression?: Node - ): Node { + ): Promise { this.setExpressionNode(expression); - find( - this.astNode.eventCall, - this.uri, - this.rootPath, - this.solFileIndex + await ( + await find( + this.astNode.eventCall, + this.uri, + this.rootPath, + this.solFileIndex + ) ).accept(find, orphanNodes, parent, this); return this; diff --git a/server/src/parser/analyzer/nodes/EnumDefinitionNode.ts b/server/src/parser/analyzer/nodes/EnumDefinitionNode.ts index bb4abb43..3ef4a86b 100644 --- a/server/src/parser/analyzer/nodes/EnumDefinitionNode.ts +++ b/server/src/parser/analyzer/nodes/EnumDefinitionNode.ts @@ -53,12 +53,12 @@ export class EnumDefinitionNode extends Node { return this; } - public accept( + public async accept( find: FinderType, orphanNodes: Node[], parent?: Node, expression?: Node - ): Node { + ): Promise { this.setExpressionNode(expression); const searcher = this.solFileIndex[this.uri]?.searcher; @@ -68,11 +68,9 @@ export class EnumDefinitionNode extends Node { } for (const member of this.astNode.members) { - find(member, this.uri, this.rootPath, this.solFileIndex).accept( - find, - orphanNodes, - this - ); + await ( + await find(member, this.uri, this.rootPath, this.solFileIndex) + ).accept(find, orphanNodes, this); } const rootNode = findSourceUnitNode(parent); diff --git a/server/src/parser/analyzer/nodes/EnumValueNode.ts b/server/src/parser/analyzer/nodes/EnumValueNode.ts index dd9f7687..6e03b029 100644 --- a/server/src/parser/analyzer/nodes/EnumValueNode.ts +++ b/server/src/parser/analyzer/nodes/EnumValueNode.ts @@ -28,12 +28,12 @@ export class EnumValueNode extends Node { return this; } - public accept( + public async accept( find: FinderType, orphanNodes: Node[], parent?: Node, expression?: Node - ): Node { + ): Promise { this.setExpressionNode(expression); if (parent) { diff --git a/server/src/parser/analyzer/nodes/EventDefinitionNode.ts b/server/src/parser/analyzer/nodes/EventDefinitionNode.ts index d3e20393..7ad6aabd 100644 --- a/server/src/parser/analyzer/nodes/EventDefinitionNode.ts +++ b/server/src/parser/analyzer/nodes/EventDefinitionNode.ts @@ -53,12 +53,12 @@ export class EventDefinitionNode extends Node { return this; } - public accept( + public async accept( find: FinderType, orphanNodes: Node[], parent?: Node, expression?: Node - ): Node { + ): Promise { this.setExpressionNode(expression); const searcher = this.solFileIndex[this.uri]?.searcher; @@ -68,11 +68,9 @@ export class EventDefinitionNode extends Node { } for (const parameter of this.astNode.parameters) { - find(parameter, this.uri, this.rootPath, this.solFileIndex).accept( - find, - orphanNodes, - this - ); + await ( + await find(parameter, this.uri, this.rootPath, this.solFileIndex) + ).accept(find, orphanNodes, this); } const rootNode = findSourceUnitNode(parent); diff --git a/server/src/parser/analyzer/nodes/ExpressionStatementNode.ts b/server/src/parser/analyzer/nodes/ExpressionStatementNode.ts index 0675ddd5..4a2103b8 100644 --- a/server/src/parser/analyzer/nodes/ExpressionStatementNode.ts +++ b/server/src/parser/analyzer/nodes/ExpressionStatementNode.ts @@ -18,20 +18,22 @@ export class ExpressionStatementNode extends Node { this.astNode = expressionStatement; } - public accept( + public async accept( find: FinderType, orphanNodes: Node[], parent?: Node, expression?: Node - ): Node { + ): Promise { this.setExpressionNode(expression); if (this.astNode.expression) { - find( - this.astNode.expression, - this.uri, - this.rootPath, - this.solFileIndex + await ( + await find( + this.astNode.expression, + this.uri, + this.rootPath, + this.solFileIndex + ) ).accept(find, orphanNodes, parent); } diff --git a/server/src/parser/analyzer/nodes/FileLevelConstantNode.ts b/server/src/parser/analyzer/nodes/FileLevelConstantNode.ts index f099dec9..20afc217 100644 --- a/server/src/parser/analyzer/nodes/FileLevelConstantNode.ts +++ b/server/src/parser/analyzer/nodes/FileLevelConstantNode.ts @@ -44,12 +44,12 @@ export class FileLevelConstantNode extends Node { return this; } - public accept( + public async accept( find: FinderType, orphanNodes: Node[], parent?: Node, expression?: Node - ): Node { + ): Promise { this.setExpressionNode(expression); if (parent) { @@ -58,11 +58,13 @@ export class FileLevelConstantNode extends Node { // eslint-disable-next-line @typescript-eslint/strict-boolean-expressions if (this.astNode.typeName) { - let typeNode = find( - this.astNode.typeName, - this.uri, - this.rootPath, - this.solFileIndex + let typeNode = await ( + await find( + this.astNode.typeName, + this.uri, + this.rootPath, + this.solFileIndex + ) ).accept(find, orphanNodes, this); this.addTypeNode(typeNode); @@ -79,11 +81,13 @@ export class FileLevelConstantNode extends Node { // eslint-disable-next-line @typescript-eslint/strict-boolean-expressions if (this.astNode.initialValue) { - find( - this.astNode.initialValue, - this.uri, - this.rootPath, - this.solFileIndex + await ( + await find( + this.astNode.initialValue, + this.uri, + this.rootPath, + this.solFileIndex + ) ).accept(find, orphanNodes, parent); } diff --git a/server/src/parser/analyzer/nodes/ForStatementNode.ts b/server/src/parser/analyzer/nodes/ForStatementNode.ts index 5e95b1af..8fc03931 100644 --- a/server/src/parser/analyzer/nodes/ForStatementNode.ts +++ b/server/src/parser/analyzer/nodes/ForStatementNode.ts @@ -17,12 +17,12 @@ export class ForStatementNode extends Node { return undefined; } - public accept( + public async accept( find: FinderType, orphanNodes: Node[], parent?: Node, expression?: Node - ): Node { + ): Promise { this.setExpressionNode(expression); if (parent) { @@ -30,38 +30,42 @@ export class ForStatementNode extends Node { } if (this.astNode.initExpression) { - find( - this.astNode.initExpression, - this.uri, - this.rootPath, - this.solFileIndex + await ( + await find( + this.astNode.initExpression, + this.uri, + this.rootPath, + this.solFileIndex + ) ).accept(find, orphanNodes, this); } if (this.astNode.conditionExpression) { - find( - this.astNode.conditionExpression, - this.uri, - this.rootPath, - this.solFileIndex + await ( + await find( + this.astNode.conditionExpression, + this.uri, + this.rootPath, + this.solFileIndex + ) ).accept(find, orphanNodes, this); } // eslint-disable-next-line @typescript-eslint/strict-boolean-expressions if (this.astNode.loopExpression) { - find( - this.astNode.loopExpression, - this.uri, - this.rootPath, - this.solFileIndex + await ( + await find( + this.astNode.loopExpression, + this.uri, + this.rootPath, + this.solFileIndex + ) ).accept(find, orphanNodes, this); } - find(this.astNode.body, this.uri, this.rootPath, this.solFileIndex).accept( - find, - orphanNodes, - this - ); + await ( + await find(this.astNode.body, this.uri, this.rootPath, this.solFileIndex) + ).accept(find, orphanNodes, this); parent?.addChild(this); diff --git a/server/src/parser/analyzer/nodes/FunctionCallNode.ts b/server/src/parser/analyzer/nodes/FunctionCallNode.ts index fa488c62..c7ff0cf2 100644 --- a/server/src/parser/analyzer/nodes/FunctionCallNode.ts +++ b/server/src/parser/analyzer/nodes/FunctionCallNode.ts @@ -19,38 +19,38 @@ export class FunctionCallNode extends Node { this.astNode = functionCall; } - public accept( + public async accept( find: FinderType, orphanNodes: Node[], parent?: Node, expression?: Node - ): Node { + ): Promise { this.setExpressionNode(expression); if (expression?.type !== "EmitStatement") { expression = this; } - const expressionNode = find( - this.astNode.expression, - this.uri, - this.rootPath, - this.solFileIndex + const expressionNode = await ( + await find( + this.astNode.expression, + this.uri, + this.rootPath, + this.solFileIndex + ) ).accept(find, orphanNodes, parent, expression); for (const argument of this.astNode.arguments) { - find(argument, this.uri, this.rootPath, this.solFileIndex).accept( - find, - orphanNodes, - parent - ); + await ( + await find(argument, this.uri, this.rootPath, this.solFileIndex) + ).accept(find, orphanNodes, parent); } const definitionTypes = expressionNode.getTypeNodes(); const searcher = this.solFileIndex[this.uri]?.searcher; for (const identifier of this.astNode.identifiers) { - const identifierNode = find( + const identifierNode = await find( identifier, this.uri, this.rootPath, diff --git a/server/src/parser/analyzer/nodes/FunctionDefinitionNode.ts b/server/src/parser/analyzer/nodes/FunctionDefinitionNode.ts index 2417d94c..e899e992 100644 --- a/server/src/parser/analyzer/nodes/FunctionDefinitionNode.ts +++ b/server/src/parser/analyzer/nodes/FunctionDefinitionNode.ts @@ -80,12 +80,12 @@ export class FunctionDefinitionNode extends AbstractFunctionDefinitionNode { return this; } - public accept( + public async accept( find: FinderType, orphanNodes: Node[], parent?: Node, expression?: Node - ): Node { + ): Promise { this.setExpressionNode(expression); if (parent) { @@ -102,49 +102,41 @@ export class FunctionDefinitionNode extends AbstractFunctionDefinitionNode { this._findChildren(orphanNodes); for (const override of this.astNode.override || []) { - find(override, this.uri, this.rootPath, this.solFileIndex).accept( - find, - orphanNodes, - this - ); + await ( + await find(override, this.uri, this.rootPath, this.solFileIndex) + ).accept(find, orphanNodes, this); } for (const param of this.astNode.parameters) { - find(param, this.uri, this.rootPath, this.solFileIndex).accept( - find, - orphanNodes, - this - ); + await ( + await find(param, this.uri, this.rootPath, this.solFileIndex) + ).accept(find, orphanNodes, this); } for (const returnParam of this.astNode.returnParameters ?? []) { - const typeNode = find( - returnParam, - this.uri, - this.rootPath, - this.solFileIndex + const typeNode = await ( + await find(returnParam, this.uri, this.rootPath, this.solFileIndex) ).accept(find, orphanNodes, this); this.addTypeNode(typeNode); } for (const modifier of this.astNode.modifiers ?? []) { - const typeNode = find( - modifier, - this.uri, - this.rootPath, - this.solFileIndex + const typeNode = await ( + await find(modifier, this.uri, this.rootPath, this.solFileIndex) ).accept(find, orphanNodes, this); this.addTypeNode(typeNode); } if (this.astNode.body) { - find( - this.astNode.body, - this.uri, - this.rootPath, - this.solFileIndex + await ( + await find( + this.astNode.body, + this.uri, + this.rootPath, + this.solFileIndex + ) ).accept(find, orphanNodes, this); } diff --git a/server/src/parser/analyzer/nodes/FunctionTypeNameNode.ts b/server/src/parser/analyzer/nodes/FunctionTypeNameNode.ts index 60a32d3c..2145cccc 100644 --- a/server/src/parser/analyzer/nodes/FunctionTypeNameNode.ts +++ b/server/src/parser/analyzer/nodes/FunctionTypeNameNode.ts @@ -19,12 +19,12 @@ export class FunctionTypeNameNode extends Node { // TO-DO: Implement name location for rename } - public accept( + public async accept( find: FinderType, orphanNodes: Node[], parent?: Node, expression?: Node - ): Node { + ): Promise { this.setExpressionNode(expression); // TO-DO: Method not implemented return this; diff --git a/server/src/parser/analyzer/nodes/HexLiteralNode.ts b/server/src/parser/analyzer/nodes/HexLiteralNode.ts index b37015c8..69797062 100644 --- a/server/src/parser/analyzer/nodes/HexLiteralNode.ts +++ b/server/src/parser/analyzer/nodes/HexLiteralNode.ts @@ -14,12 +14,12 @@ export class HexLiteralNode extends Node { // TO-DO: Implement name location for rename } - public accept( + public async accept( find: FinderType, orphanNodes: Node[], parent?: Node, expression?: Node - ): Node { + ): Promise { this.setExpressionNode(expression); // TO-DO: Method not implemented return this; diff --git a/server/src/parser/analyzer/nodes/HexNumberNode.ts b/server/src/parser/analyzer/nodes/HexNumberNode.ts index 12138dbf..adf5970f 100644 --- a/server/src/parser/analyzer/nodes/HexNumberNode.ts +++ b/server/src/parser/analyzer/nodes/HexNumberNode.ts @@ -14,12 +14,12 @@ export class HexNumberNode extends Node { // TO-DO: Implement name location for rename } - public accept( + public async accept( find: FinderType, orphanNodes: Node[], parent?: Node, expression?: Node - ): Node { + ): Promise { this.setExpressionNode(expression); // TO-DO: Method not implemented return this; diff --git a/server/src/parser/analyzer/nodes/IdentifierNode.ts b/server/src/parser/analyzer/nodes/IdentifierNode.ts index aa9bbd86..0ad37b69 100644 --- a/server/src/parser/analyzer/nodes/IdentifierNode.ts +++ b/server/src/parser/analyzer/nodes/IdentifierNode.ts @@ -68,12 +68,12 @@ export class IdentifierNode extends AbstractIdentifierNode { } } - public accept( + public async accept( find: FinderType, orphanNodes: Node[], parent?: Node, expression?: Node - ): Node { + ): Promise { this.setExpressionNode(expression); if (expression?.type === "ImportDirective" && parent) { diff --git a/server/src/parser/analyzer/nodes/IfStatementNode.ts b/server/src/parser/analyzer/nodes/IfStatementNode.ts index 741e6a0e..e13c4e61 100644 --- a/server/src/parser/analyzer/nodes/IfStatementNode.ts +++ b/server/src/parser/analyzer/nodes/IfStatementNode.ts @@ -17,37 +17,43 @@ export class IfStatementNode extends Node { return undefined; } - public accept( + public async accept( find: FinderType, orphanNodes: Node[], parent?: Node, expression?: Node - ): Node { + ): Promise { this.setExpressionNode(expression); if (parent) { this.setParent(parent); } - find( - this.astNode.condition, - this.uri, - this.rootPath, - this.solFileIndex - ).accept(find, orphanNodes, this); - find( - this.astNode.trueBody, - this.uri, - this.rootPath, - this.solFileIndex + await ( + await find( + this.astNode.condition, + this.uri, + this.rootPath, + this.solFileIndex + ) ).accept(find, orphanNodes, this); - - if (this.astNode.falseBody) { - find( - this.astNode.falseBody, + await ( + await find( + this.astNode.trueBody, this.uri, this.rootPath, this.solFileIndex + ) + ).accept(find, orphanNodes, this); + + if (this.astNode.falseBody) { + await ( + await find( + this.astNode.falseBody, + this.uri, + this.rootPath, + this.solFileIndex + ) ).accept(find, orphanNodes, this); } diff --git a/server/src/parser/analyzer/nodes/ImportDirectiveNode.ts b/server/src/parser/analyzer/nodes/ImportDirectiveNode.ts index 36540bf9..92d98995 100644 --- a/server/src/parser/analyzer/nodes/ImportDirectiveNode.ts +++ b/server/src/parser/analyzer/nodes/ImportDirectiveNode.ts @@ -1,7 +1,5 @@ -import * as path from "path"; import * as fs from "fs"; -import { resolveDependency } from "@analyzer/resolver"; import { ImportDirective, FinderType, @@ -9,7 +7,6 @@ import { Node, SourceUnitNode, ImportDirectiveNode as AbstractImportDirectiveNode, - SolFileState, } from "@common/types"; import { analyzeSolFile } from "@analyzer/analyzeSolFile"; import { toUnixStyle } from "../../../utils/index"; @@ -24,28 +21,13 @@ export class ImportDirectiveNode extends AbstractImportDirectiveNode { importDirective: ImportDirective, uri: string, rootPath: string, - documentsAnalyzer: SolFileIndexMap + solFileIndex: SolFileIndexMap, + resolvedUri: string ) { - super( - importDirective, - uri, - rootPath, - documentsAnalyzer, - importDirective.path - ); + super(importDirective, uri, rootPath, solFileIndex, importDirective.path); this.realUri = toUnixStyle(fs.realpathSync(uri)); - const remappings = documentsAnalyzer[this.realUri]?.project.remappings; - - try { - this.uri = resolveDependency( - path.dirname(this.realUri), - importDirective.path, - remappings - ); - } catch (err) { - this.uri = ""; - } + this.uri = resolvedUri; // eslint-disable-next-line @typescript-eslint/strict-boolean-expressions if (importDirective.pathLiteral && importDirective.pathLiteral.loc) { @@ -63,12 +45,12 @@ export class ImportDirectiveNode extends AbstractImportDirectiveNode { return this; } - public accept( + public async accept( find: FinderType, orphanNodes: Node[], parent?: Node, expression?: Node - ): Node { + ): Promise { this.setExpressionNode(expression); if (parent) { @@ -76,11 +58,8 @@ export class ImportDirectiveNode extends AbstractImportDirectiveNode { } const solFileEntry = this.solFileIndex[toUnixStyle(this.uri)]; - if ( - solFileEntry !== undefined && - solFileEntry.status !== SolFileState.ANALYZED - ) { - analyzeSolFile({ solFileIndex: this.solFileIndex }, solFileEntry); + if (solFileEntry !== undefined && !solFileEntry.isAnalyzed()) { + await analyzeSolFile({ solFileIndex: this.solFileIndex }, solFileEntry); // Analyze will change root node so we need to return root node after analyze const rootNode = this.solFileIndex[this.realUri]?.analyzerTree.tree; @@ -108,21 +87,33 @@ export class ImportDirectiveNode extends AbstractImportDirectiveNode { const aliesNodes: Node[] = []; for (const symbolAliasesIdentifier of this.astNode .symbolAliasesIdentifiers || []) { - const importedContractNode = find( + const foundImportedContractNode = await find( symbolAliasesIdentifier[0], this.realUri, this.rootPath, this.solFileIndex - ).accept(find, orphanNodes, this); + ); + const importedContractNode = await foundImportedContractNode.accept( + find, + orphanNodes, + this + ); // Check if alias exist for importedContractNode if (symbolAliasesIdentifier[1]) { - const importedContractAliasNode = find( + const foundImportedContractAliasNode = await find( symbolAliasesIdentifier[1], this.realUri, this.rootPath, this.solFileIndex - ).accept(find, orphanNodes, importedContractNode, this); + ); + const importedContractAliasNode = + await foundImportedContractAliasNode.accept( + find, + orphanNodes, + importedContractNode, + this + ); importedContractAliasNode.setAliasName(importedContractNode.getName()); aliesNodes.push(importedContractAliasNode); diff --git a/server/src/parser/analyzer/nodes/IndexAccessNode.ts b/server/src/parser/analyzer/nodes/IndexAccessNode.ts index 47e514b6..2eaa9f46 100644 --- a/server/src/parser/analyzer/nodes/IndexAccessNode.ts +++ b/server/src/parser/analyzer/nodes/IndexAccessNode.ts @@ -13,25 +13,20 @@ export class IndexAccessNode extends Node { this.astNode = indexAccess; } - public accept( + public async accept( find: FinderType, orphanNodes: Node[], parent?: Node, expression?: Node - ): Node { + ): Promise { this.setExpressionNode(expression); - const typeNode = find( - this.astNode.base, - this.uri, - this.rootPath, - this.solFileIndex + const typeNode = await ( + await find(this.astNode.base, this.uri, this.rootPath, this.solFileIndex) ).accept(find, orphanNodes, parent, this); - find(this.astNode.index, this.uri, this.rootPath, this.solFileIndex).accept( - find, - orphanNodes, - parent - ); + await ( + await find(this.astNode.index, this.uri, this.rootPath, this.solFileIndex) + ).accept(find, orphanNodes, parent); return typeNode; } diff --git a/server/src/parser/analyzer/nodes/IndexRangeAccessNode.ts b/server/src/parser/analyzer/nodes/IndexRangeAccessNode.ts index eee3f08d..f198dd9c 100644 --- a/server/src/parser/analyzer/nodes/IndexRangeAccessNode.ts +++ b/server/src/parser/analyzer/nodes/IndexRangeAccessNode.ts @@ -18,36 +18,37 @@ export class IndexRangeAccessNode extends Node { this.astNode = indexRangeAccess; } - public accept( + public async accept( find: FinderType, orphanNodes: Node[], parent?: Node, expression?: Node - ): Node { + ): Promise { this.setExpressionNode(expression); - const typeNode = find( - this.astNode.base, - this.uri, - this.rootPath, - this.solFileIndex + const typeNode = await ( + await find(this.astNode.base, this.uri, this.rootPath, this.solFileIndex) ).accept(find, orphanNodes, parent, this); if (this.astNode.indexStart) { - find( - this.astNode.indexStart, - this.uri, - this.rootPath, - this.solFileIndex + await ( + await find( + this.astNode.indexStart, + this.uri, + this.rootPath, + this.solFileIndex + ) ).accept(find, orphanNodes, parent); } if (this.astNode.indexEnd) { - find( - this.astNode.indexEnd, - this.uri, - this.rootPath, - this.solFileIndex + await ( + await find( + this.astNode.indexEnd, + this.uri, + this.rootPath, + this.solFileIndex + ) ).accept(find, orphanNodes, parent); } diff --git a/server/src/parser/analyzer/nodes/InheritanceSpecifierNode.ts b/server/src/parser/analyzer/nodes/InheritanceSpecifierNode.ts index 67b5e573..99afdccd 100644 --- a/server/src/parser/analyzer/nodes/InheritanceSpecifierNode.ts +++ b/server/src/parser/analyzer/nodes/InheritanceSpecifierNode.ts @@ -18,27 +18,27 @@ export class InheritanceSpecifierNode extends Node { this.astNode = inheritanceSpecifier; } - public accept( + public async accept( find: FinderType, orphanNodes: Node[], parent?: Node, expression?: Node - ): Node { + ): Promise { this.setExpressionNode(expression); - const baseNode = find( - this.astNode.baseName, - this.uri, - this.rootPath, - this.solFileIndex + const baseNode = await ( + await find( + this.astNode.baseName, + this.uri, + this.rootPath, + this.solFileIndex + ) ).accept(find, orphanNodes, parent); for (const argument of this.astNode.arguments) { - find(argument, this.uri, this.rootPath, this.solFileIndex).accept( - find, - orphanNodes, - parent - ); + await ( + await find(argument, this.uri, this.rootPath, this.solFileIndex) + ).accept(find, orphanNodes, parent); } return baseNode; diff --git a/server/src/parser/analyzer/nodes/InlineAssemblyStatementNode.ts b/server/src/parser/analyzer/nodes/InlineAssemblyStatementNode.ts index c0a5f784..b4a91a8b 100644 --- a/server/src/parser/analyzer/nodes/InlineAssemblyStatementNode.ts +++ b/server/src/parser/analyzer/nodes/InlineAssemblyStatementNode.ts @@ -18,21 +18,23 @@ export class InlineAssemblyStatementNode extends Node { this.astNode = inlineAssemblyStatement; } - public accept( + public async accept( find: FinderType, orphanNodes: Node[], parent?: Node, expression?: Node - ): Node { + ): Promise { this.setExpressionNode(expression); // eslint-disable-next-line @typescript-eslint/strict-boolean-expressions if (this.astNode.body) { - find( - this.astNode.body, - this.uri, - this.rootPath, - this.solFileIndex + await ( + await find( + this.astNode.body, + this.uri, + this.rootPath, + this.solFileIndex + ) ).accept(find, orphanNodes, parent); } diff --git a/server/src/parser/analyzer/nodes/LabelDefinitionNode.ts b/server/src/parser/analyzer/nodes/LabelDefinitionNode.ts index 6ee900fe..1c63b75d 100644 --- a/server/src/parser/analyzer/nodes/LabelDefinitionNode.ts +++ b/server/src/parser/analyzer/nodes/LabelDefinitionNode.ts @@ -27,12 +27,12 @@ export class LabelDefinitionNode extends Node { return this; } - public accept( + public async accept( find: FinderType, orphanNodes: Node[], parent?: Node, expression?: Node - ): Node { + ): Promise { this.setExpressionNode(expression); // TO-DO: Method not implemented return this; diff --git a/server/src/parser/analyzer/nodes/MappingNode.ts b/server/src/parser/analyzer/nodes/MappingNode.ts index 92d188f6..45e07f27 100644 --- a/server/src/parser/analyzer/nodes/MappingNode.ts +++ b/server/src/parser/analyzer/nodes/MappingNode.ts @@ -13,25 +13,29 @@ export class MappingNode extends Node { this.astNode = mapping; } - public accept( + public async accept( find: FinderType, orphanNodes: Node[], parent?: Node, expression?: Node - ): Node { + ): Promise { this.setExpressionNode(expression); - find( - this.astNode.keyType, - this.uri, - this.rootPath, - this.solFileIndex + await ( + await find( + this.astNode.keyType, + this.uri, + this.rootPath, + this.solFileIndex + ) ).accept(find, orphanNodes, parent); - const typeNode = find( - this.astNode.valueType, - this.uri, - this.rootPath, - this.solFileIndex + const typeNode = await ( + await find( + this.astNode.valueType, + this.uri, + this.rootPath, + this.solFileIndex + ) ).accept(find, orphanNodes, parent); this.addTypeNode(typeNode); diff --git a/server/src/parser/analyzer/nodes/MemberAccessNode.ts b/server/src/parser/analyzer/nodes/MemberAccessNode.ts index d3178792..247c59a1 100644 --- a/server/src/parser/analyzer/nodes/MemberAccessNode.ts +++ b/server/src/parser/analyzer/nodes/MemberAccessNode.ts @@ -60,19 +60,21 @@ export class MemberAccessNode extends IMemberAccessNode { } } - public accept( + public async accept( find: FinderType, orphanNodes: Node[], parent?: Node, expression?: Node - ): Node { + ): Promise { this.setExpressionNode(expression); - const expressionNode = find( - this.astNode.expression, - this.uri, - this.rootPath, - this.solFileIndex + const expressionNode = await ( + await find( + this.astNode.expression, + this.uri, + this.rootPath, + this.solFileIndex + ) ).accept(find, orphanNodes, parent, this); this.setPreviousMemberAccessNode(expressionNode); diff --git a/server/src/parser/analyzer/nodes/ModifierDefinitionNode.ts b/server/src/parser/analyzer/nodes/ModifierDefinitionNode.ts index 058f2980..d913ca58 100644 --- a/server/src/parser/analyzer/nodes/ModifierDefinitionNode.ts +++ b/server/src/parser/analyzer/nodes/ModifierDefinitionNode.ts @@ -53,12 +53,12 @@ export class ModifierDefinitionNode extends Node { return this; } - public accept( + public async accept( find: FinderType, orphanNodes: Node[], parent?: Node, expression?: Node - ): Node { + ): Promise { this.setExpressionNode(expression); if (parent) { @@ -66,27 +66,25 @@ export class ModifierDefinitionNode extends Node { } for (const override of this.astNode.override || []) { - find(override, this.uri, this.rootPath, this.solFileIndex).accept( - find, - orphanNodes, - this - ); + await ( + await find(override, this.uri, this.rootPath, this.solFileIndex) + ).accept(find, orphanNodes, this); } for (const param of this.astNode.parameters || []) { - find(param, this.uri, this.rootPath, this.solFileIndex).accept( - find, - orphanNodes, - this - ); + await ( + await find(param, this.uri, this.rootPath, this.solFileIndex) + ).accept(find, orphanNodes, this); } if (this.astNode.body) { - find( - this.astNode.body, - this.uri, - this.rootPath, - this.solFileIndex + await ( + await find( + this.astNode.body, + this.uri, + this.rootPath, + this.solFileIndex + ) ).accept(find, orphanNodes, this); } diff --git a/server/src/parser/analyzer/nodes/ModifierInvocationNode.ts b/server/src/parser/analyzer/nodes/ModifierInvocationNode.ts index a95679df..d3d53741 100644 --- a/server/src/parser/analyzer/nodes/ModifierInvocationNode.ts +++ b/server/src/parser/analyzer/nodes/ModifierInvocationNode.ts @@ -41,20 +41,18 @@ export class ModifierInvocationNode extends Node { } } - public accept( + public async accept( find: FinderType, orphanNodes: Node[], parent?: Node, expression?: Node - ): Node { + ): Promise { this.setExpressionNode(expression); for (const argument of this.astNode.arguments || []) { - find(argument, this.uri, this.rootPath, this.solFileIndex).accept( - find, - orphanNodes, - parent - ); + await ( + await find(argument, this.uri, this.rootPath, this.solFileIndex) + ).accept(find, orphanNodes, parent); } if (!parent) { diff --git a/server/src/parser/analyzer/nodes/NameValueExpressionNode.ts b/server/src/parser/analyzer/nodes/NameValueExpressionNode.ts index f5a92a39..e0992ddc 100644 --- a/server/src/parser/analyzer/nodes/NameValueExpressionNode.ts +++ b/server/src/parser/analyzer/nodes/NameValueExpressionNode.ts @@ -18,25 +18,29 @@ export class NameValueExpressionNode extends Node { this.astNode = nameValueExpression; } - public accept( + public async accept( find: FinderType, orphanNodes: Node[], parent?: Node, expression?: Node - ): Node { + ): Promise { this.setExpressionNode(expression); - find( - this.astNode.expression, - this.uri, - this.rootPath, - this.solFileIndex + await ( + await find( + this.astNode.expression, + this.uri, + this.rootPath, + this.solFileIndex + ) ).accept(find, orphanNodes, parent); - find( - this.astNode.arguments, - this.uri, - this.rootPath, - this.solFileIndex + await ( + await find( + this.astNode.arguments, + this.uri, + this.rootPath, + this.solFileIndex + ) ).accept(find, orphanNodes, parent); return this; diff --git a/server/src/parser/analyzer/nodes/NameValueListNode.ts b/server/src/parser/analyzer/nodes/NameValueListNode.ts index 3f2763e3..86f36ad5 100644 --- a/server/src/parser/analyzer/nodes/NameValueListNode.ts +++ b/server/src/parser/analyzer/nodes/NameValueListNode.ts @@ -18,20 +18,18 @@ export class NameValueListNode extends Node { this.astNode = nameValueList; } - public accept( + public async accept( find: FinderType, orphanNodes: Node[], parent?: Node, expression?: Node - ): Node { + ): Promise { this.setExpressionNode(expression); for (const identifier of this.astNode.identifiers) { - find(identifier, this.uri, this.rootPath, this.solFileIndex).accept( - find, - orphanNodes, - parent - ); + await ( + await find(identifier, this.uri, this.rootPath, this.solFileIndex) + ).accept(find, orphanNodes, parent); } return this; } diff --git a/server/src/parser/analyzer/nodes/NewExpressionNode.ts b/server/src/parser/analyzer/nodes/NewExpressionNode.ts index 5209592b..b9b8df8e 100644 --- a/server/src/parser/analyzer/nodes/NewExpressionNode.ts +++ b/server/src/parser/analyzer/nodes/NewExpressionNode.ts @@ -18,21 +18,23 @@ export class NewExpressionNode extends Node { this.astNode = newExpression; } - public accept( + public async accept( find: FinderType, orphanNodes: Node[], parent?: Node, expression?: Node - ): Node { + ): Promise { this.setExpressionNode(expression); // eslint-disable-next-line @typescript-eslint/strict-boolean-expressions if (this.astNode.typeName) { - find( - this.astNode.typeName, - this.uri, - this.rootPath, - this.solFileIndex + await ( + await find( + this.astNode.typeName, + this.uri, + this.rootPath, + this.solFileIndex + ) ).accept(find, orphanNodes, parent); } diff --git a/server/src/parser/analyzer/nodes/NumberLiteralNode.ts b/server/src/parser/analyzer/nodes/NumberLiteralNode.ts index 3db5ea1c..822703b5 100644 --- a/server/src/parser/analyzer/nodes/NumberLiteralNode.ts +++ b/server/src/parser/analyzer/nodes/NumberLiteralNode.ts @@ -19,12 +19,12 @@ export class NumberLiteralNode extends Node { // TO-DO: Implement name location for rename } - public accept( + public async accept( find: FinderType, orphanNodes: Node[], parent?: Node, expression?: Node - ): Node { + ): Promise { this.setExpressionNode(expression); // TO-DO: Method not implemented return this; diff --git a/server/src/parser/analyzer/nodes/PragmaDirectiveNode.ts b/server/src/parser/analyzer/nodes/PragmaDirectiveNode.ts index c794adc6..7d2bf39f 100644 --- a/server/src/parser/analyzer/nodes/PragmaDirectiveNode.ts +++ b/server/src/parser/analyzer/nodes/PragmaDirectiveNode.ts @@ -19,12 +19,12 @@ export class PragmaDirectiveNode extends Node { // TO-DO: Implement name location for rename } - public accept( + public async accept( find: FinderType, orphanNodes: Node[], parent?: Node, expression?: Node - ): Node { + ): Promise { this.setExpressionNode(expression); // TO-DO: Method not implemented return this; diff --git a/server/src/parser/analyzer/nodes/ReturnStatementNode.ts b/server/src/parser/analyzer/nodes/ReturnStatementNode.ts index 19bd2027..544a1d3b 100644 --- a/server/src/parser/analyzer/nodes/ReturnStatementNode.ts +++ b/server/src/parser/analyzer/nodes/ReturnStatementNode.ts @@ -18,20 +18,22 @@ export class ReturnStatementNode extends Node { this.astNode = returnStatement; } - public accept( + public async accept( find: FinderType, orphanNodes: Node[], parent?: Node, expression?: Node - ): Node { + ): Promise { this.setExpressionNode(expression); if (this.astNode.expression) { - find( - this.astNode.expression, - this.uri, - this.rootPath, - this.solFileIndex + await ( + await find( + this.astNode.expression, + this.uri, + this.rootPath, + this.solFileIndex + ) ).accept(find, orphanNodes, parent); } diff --git a/server/src/parser/analyzer/nodes/RevertStatementNode.ts b/server/src/parser/analyzer/nodes/RevertStatementNode.ts index b4c39eab..6d358226 100644 --- a/server/src/parser/analyzer/nodes/RevertStatementNode.ts +++ b/server/src/parser/analyzer/nodes/RevertStatementNode.ts @@ -18,19 +18,21 @@ export class RevertStatementNode extends Node { this.astNode = revertStatement; } - public accept( + public async accept( find: FinderType, orphanNodes: Node[], parent?: Node, expression?: Node - ): Node { + ): Promise { this.setExpressionNode(expression); - find( - this.astNode.revertCall, - this.uri, - this.rootPath, - this.solFileIndex + await ( + await find( + this.astNode.revertCall, + this.uri, + this.rootPath, + this.solFileIndex + ) ).accept(find, orphanNodes, parent, this); return this; diff --git a/server/src/parser/analyzer/nodes/SourceUnitNode.ts b/server/src/parser/analyzer/nodes/SourceUnitNode.ts index 1992f105..041bf859 100644 --- a/server/src/parser/analyzer/nodes/SourceUnitNode.ts +++ b/server/src/parser/analyzer/nodes/SourceUnitNode.ts @@ -23,12 +23,12 @@ export class SourceUnitNode extends AbstractSourceUnitNode { return undefined; } - public accept( + public async accept( find: FinderType, orphanNodes: Node[], parent?: Node, expression?: Node - ): Node { + ): Promise { this.setExpressionNode(expression); const documentAnalyzer = this.solFileIndex[this.uri]; @@ -47,11 +47,9 @@ export class SourceUnitNode extends AbstractSourceUnitNode { } for (const child of this.astNode.children) { - find(child, this.uri, this.rootPath, this.solFileIndex).accept( - find, - orphanNodes, - this - ); + await ( + await find(child, this.uri, this.rootPath, this.solFileIndex) + ).accept(find, orphanNodes, this); } return this; diff --git a/server/src/parser/analyzer/nodes/StateVariableDeclarationNode.ts b/server/src/parser/analyzer/nodes/StateVariableDeclarationNode.ts index 66a0f851..b98a7dd5 100644 --- a/server/src/parser/analyzer/nodes/StateVariableDeclarationNode.ts +++ b/server/src/parser/analyzer/nodes/StateVariableDeclarationNode.ts @@ -28,28 +28,28 @@ export class StateVariableDeclarationNode extends Node { return this; } - public accept( + public async accept( find: FinderType, orphanNodes: Node[], parent?: Node, expression?: Node - ): Node { + ): Promise { this.setExpressionNode(expression); for (const variable of this.astNode.variables) { - find(variable, this.uri, this.rootPath, this.solFileIndex).accept( - find, - orphanNodes, - parent - ); + await ( + await find(variable, this.uri, this.rootPath, this.solFileIndex) + ).accept(find, orphanNodes, parent); } if (this.astNode.initialValue) { - find( - this.astNode.initialValue, - this.uri, - this.rootPath, - this.solFileIndex + await ( + await find( + this.astNode.initialValue, + this.uri, + this.rootPath, + this.solFileIndex + ) ).accept(find, orphanNodes, parent); } diff --git a/server/src/parser/analyzer/nodes/StringLiteralNode.ts b/server/src/parser/analyzer/nodes/StringLiteralNode.ts index b82d3555..829ec4c2 100644 --- a/server/src/parser/analyzer/nodes/StringLiteralNode.ts +++ b/server/src/parser/analyzer/nodes/StringLiteralNode.ts @@ -19,12 +19,12 @@ export class StringLiteralNode extends Node { // TO-DO: Implement name location for rename } - public accept( + public async accept( find: FinderType, orphanNodes: Node[], parent?: Node, expression?: Node - ): Node { + ): Promise { this.setExpressionNode(expression); // TO-DO: Method not implemented return this; diff --git a/server/src/parser/analyzer/nodes/StructDefinitionNode.ts b/server/src/parser/analyzer/nodes/StructDefinitionNode.ts index d098cbb5..5e219d40 100644 --- a/server/src/parser/analyzer/nodes/StructDefinitionNode.ts +++ b/server/src/parser/analyzer/nodes/StructDefinitionNode.ts @@ -57,12 +57,12 @@ export class StructDefinitionNode extends Node { return this; } - public accept( + public async accept( find: FinderType, orphanNodes: Node[], parent?: Node, expression?: Node - ): Node { + ): Promise { this.setExpressionNode(expression); if (parent) { @@ -70,11 +70,9 @@ export class StructDefinitionNode extends Node { } for (const member of this.astNode.members) { - find(member, this.uri, this.rootPath, this.solFileIndex).accept( - find, - orphanNodes, - this - ); + await ( + await find(member, this.uri, this.rootPath, this.solFileIndex) + ).accept(find, orphanNodes, this); } const rootNode = findSourceUnitNode(parent); diff --git a/server/src/parser/analyzer/nodes/SubAssemblyNode.ts b/server/src/parser/analyzer/nodes/SubAssemblyNode.ts index 1c549650..77041004 100644 --- a/server/src/parser/analyzer/nodes/SubAssemblyNode.ts +++ b/server/src/parser/analyzer/nodes/SubAssemblyNode.ts @@ -14,12 +14,12 @@ export class SubAssemblyNode extends Node { // TO-DO: Implement name location for rename } - public accept( + public async accept( find: FinderType, orphanNodes: Node[], parent?: Node, expression?: Node - ): Node { + ): Promise { this.setExpressionNode(expression); // TO-DO: Method not implemented return this; diff --git a/server/src/parser/analyzer/nodes/ThrowStatementNode.ts b/server/src/parser/analyzer/nodes/ThrowStatementNode.ts index 7912dcae..0ba159d2 100644 --- a/server/src/parser/analyzer/nodes/ThrowStatementNode.ts +++ b/server/src/parser/analyzer/nodes/ThrowStatementNode.ts @@ -19,12 +19,12 @@ export class ThrowStatementNode extends Node { // TO-DO: Implement name location for rename } - public accept( + public async accept( find: FinderType, orphanNodes: Node[], parent?: Node, expression?: Node - ): Node { + ): Promise { this.setExpressionNode(expression); // TO-DO: Method not implemented return this; diff --git a/server/src/parser/analyzer/nodes/TryStatementNode.ts b/server/src/parser/analyzer/nodes/TryStatementNode.ts index 18189e96..f4f20e5c 100644 --- a/server/src/parser/analyzer/nodes/TryStatementNode.ts +++ b/server/src/parser/analyzer/nodes/TryStatementNode.ts @@ -13,45 +13,41 @@ export class TryStatementNode extends Node { this.astNode = tryStatement; } - public accept( + public async accept( find: FinderType, orphanNodes: Node[], parent?: Node, expression?: Node - ): Node { + ): Promise { this.setExpressionNode(expression); if (parent) { this.setParent(parent); } - find( - this.astNode.expression, - this.uri, - this.rootPath, - this.solFileIndex + await ( + await find( + this.astNode.expression, + this.uri, + this.rootPath, + this.solFileIndex + ) ).accept(find, orphanNodes, this); for (const returnParameter of this.astNode.returnParameters || []) { - find(returnParameter, this.uri, this.rootPath, this.solFileIndex).accept( - find, - orphanNodes, - this - ); + await ( + await find(returnParameter, this.uri, this.rootPath, this.solFileIndex) + ).accept(find, orphanNodes, this); } - find(this.astNode.body, this.uri, this.rootPath, this.solFileIndex).accept( - find, - orphanNodes, - this - ); + await ( + await find(this.astNode.body, this.uri, this.rootPath, this.solFileIndex) + ).accept(find, orphanNodes, this); for (const catchClause of this.astNode.catchClauses ?? []) { - find(catchClause, this.uri, this.rootPath, this.solFileIndex).accept( - find, - orphanNodes, - this - ); + await ( + await find(catchClause, this.uri, this.rootPath, this.solFileIndex) + ).accept(find, orphanNodes, this); } parent?.addChild(this); diff --git a/server/src/parser/analyzer/nodes/TupleExpressionNode.ts b/server/src/parser/analyzer/nodes/TupleExpressionNode.ts index 14afc10f..1569d9b7 100644 --- a/server/src/parser/analyzer/nodes/TupleExpressionNode.ts +++ b/server/src/parser/analyzer/nodes/TupleExpressionNode.ts @@ -18,21 +18,19 @@ export class TupleExpressionNode extends Node { this.astNode = tupleExpression; } - public accept( + public async accept( find: FinderType, orphanNodes: Node[], parent?: Node, expression?: Node - ): Node { + ): Promise { this.setExpressionNode(expression); for (const component of this.astNode.components) { if (component) { - find(component, this.uri, this.rootPath, this.solFileIndex).accept( - find, - orphanNodes, - parent - ); + await ( + await find(component, this.uri, this.rootPath, this.solFileIndex) + ).accept(find, orphanNodes, parent); } } diff --git a/server/src/parser/analyzer/nodes/TypeDefinitionNode.ts b/server/src/parser/analyzer/nodes/TypeDefinitionNode.ts index 78bccd27..0657e930 100644 --- a/server/src/parser/analyzer/nodes/TypeDefinitionNode.ts +++ b/server/src/parser/analyzer/nodes/TypeDefinitionNode.ts @@ -53,12 +53,12 @@ export class TypeDefinitionNode extends Node { return this; } - public accept( + public async accept( find: FinderType, orphanNodes: Node[], parent?: Node, expression?: Node - ): Node { + ): Promise { this.setExpressionNode(expression); const searcher = this.solFileIndex[this.uri]?.searcher; diff --git a/server/src/parser/analyzer/nodes/TypeNameExpressionNode.ts b/server/src/parser/analyzer/nodes/TypeNameExpressionNode.ts index 29c02dc1..f3696737 100644 --- a/server/src/parser/analyzer/nodes/TypeNameExpressionNode.ts +++ b/server/src/parser/analyzer/nodes/TypeNameExpressionNode.ts @@ -19,12 +19,12 @@ export class TypeNameExpressionNode extends Node { // TO-DO: Implement name location for rename } - public accept( + public async accept( find: FinderType, orphanNodes: Node[], parent?: Node, expression?: Node - ): Node { + ): Promise { this.setExpressionNode(expression); // TO-DO: Method not implemented return this; diff --git a/server/src/parser/analyzer/nodes/UnaryOperationNode.ts b/server/src/parser/analyzer/nodes/UnaryOperationNode.ts index 221ab338..c08d3dfe 100644 --- a/server/src/parser/analyzer/nodes/UnaryOperationNode.ts +++ b/server/src/parser/analyzer/nodes/UnaryOperationNode.ts @@ -18,19 +18,21 @@ export class UnaryOperationNode extends Node { this.astNode = unaryOperation; } - public accept( + public async accept( find: FinderType, orphanNodes: Node[], parent?: Node, expression?: Node - ): Node { + ): Promise { this.setExpressionNode(expression); - find( - this.astNode.subExpression, - this.uri, - this.rootPath, - this.solFileIndex + await ( + await find( + this.astNode.subExpression, + this.uri, + this.rootPath, + this.solFileIndex + ) ).accept(find, orphanNodes, parent); return this; diff --git a/server/src/parser/analyzer/nodes/UncheckedStatementNode.ts b/server/src/parser/analyzer/nodes/UncheckedStatementNode.ts index 93d81f8e..d5cf00e9 100644 --- a/server/src/parser/analyzer/nodes/UncheckedStatementNode.ts +++ b/server/src/parser/analyzer/nodes/UncheckedStatementNode.ts @@ -18,21 +18,23 @@ export class UncheckedStatementNode extends Node { this.astNode = uncheckedStatement; } - public accept( + public async accept( find: FinderType, orphanNodes: Node[], parent?: Node, expression?: Node - ): Node { + ): Promise { this.setExpressionNode(expression); // eslint-disable-next-line @typescript-eslint/strict-boolean-expressions if (this.astNode.block) { - find( - this.astNode.block, - this.uri, - this.rootPath, - this.solFileIndex + await ( + await find( + this.astNode.block, + this.uri, + this.rootPath, + this.solFileIndex + ) ).accept(find, orphanNodes, parent); } diff --git a/server/src/parser/analyzer/nodes/UserDefinedTypeNameNode.ts b/server/src/parser/analyzer/nodes/UserDefinedTypeNameNode.ts index 6cbb05e4..cd0d9e90 100644 --- a/server/src/parser/analyzer/nodes/UserDefinedTypeNameNode.ts +++ b/server/src/parser/analyzer/nodes/UserDefinedTypeNameNode.ts @@ -66,12 +66,12 @@ export class UserDefinedTypeNameNode extends Node { } } - public accept( + public async accept( find: FinderType, orphanNodes: Node[], parent?: Node, expression?: Node - ): Node { + ): Promise { this.setExpressionNode(expression); if (!parent) { diff --git a/server/src/parser/analyzer/nodes/UsingForDeclarationNode.ts b/server/src/parser/analyzer/nodes/UsingForDeclarationNode.ts index 667ab9f2..dbd097e0 100644 --- a/server/src/parser/analyzer/nodes/UsingForDeclarationNode.ts +++ b/server/src/parser/analyzer/nodes/UsingForDeclarationNode.ts @@ -42,20 +42,22 @@ export class UsingForDeclarationNode extends Node { } } - public accept( + public async accept( find: FinderType, orphanNodes: Node[], parent?: Node, expression?: Node - ): Node { + ): Promise { this.setExpressionNode(expression); if (this.astNode.typeName) { - find( - this.astNode.typeName, - this.uri, - this.rootPath, - this.solFileIndex + await ( + await find( + this.astNode.typeName, + this.uri, + this.rootPath, + this.solFileIndex + ) ).accept(find, orphanNodes, parent); } diff --git a/server/src/parser/analyzer/nodes/VariableDeclarationNode.ts b/server/src/parser/analyzer/nodes/VariableDeclarationNode.ts index 260f37d6..36b3645d 100644 --- a/server/src/parser/analyzer/nodes/VariableDeclarationNode.ts +++ b/server/src/parser/analyzer/nodes/VariableDeclarationNode.ts @@ -56,12 +56,12 @@ export class VariableDeclarationNode extends AbstractVariableDeclarationNode { return this; } - public accept( + public async accept( find: FinderType, orphanNodes: Node[], parent?: Node, expression?: Node - ): Node { + ): Promise { this.setExpressionNode(expression); if (parent) { @@ -69,11 +69,13 @@ export class VariableDeclarationNode extends AbstractVariableDeclarationNode { } if (this.astNode.typeName) { - let typeNode = find( - this.astNode.typeName, - this.uri, - this.rootPath, - this.solFileIndex + let typeNode = await ( + await find( + this.astNode.typeName, + this.uri, + this.rootPath, + this.solFileIndex + ) ).accept(find, orphanNodes, this); this.addTypeNode(typeNode); diff --git a/server/src/parser/analyzer/nodes/VariableDeclarationStatementNode.ts b/server/src/parser/analyzer/nodes/VariableDeclarationStatementNode.ts index 41cb9ee7..74567e7b 100644 --- a/server/src/parser/analyzer/nodes/VariableDeclarationStatementNode.ts +++ b/server/src/parser/analyzer/nodes/VariableDeclarationStatementNode.ts @@ -28,30 +28,30 @@ export class VariableDeclarationStatementNode extends Node { return this; } - public accept( + public async accept( find: FinderType, orphanNodes: Node[], parent?: Node, expression?: Node - ): Node { + ): Promise { this.setExpressionNode(expression); for (const variable of this.astNode.variables) { if (variable) { - find(variable, this.uri, this.rootPath, this.solFileIndex).accept( - find, - orphanNodes, - parent - ); + await ( + await find(variable, this.uri, this.rootPath, this.solFileIndex) + ).accept(find, orphanNodes, parent); } } if (this.astNode.initialValue) { - find( - this.astNode.initialValue, - this.uri, - this.rootPath, - this.solFileIndex + await ( + await find( + this.astNode.initialValue, + this.uri, + this.rootPath, + this.solFileIndex + ) ).accept(find, orphanNodes, parent); } diff --git a/server/src/parser/analyzer/nodes/WhileStatementNode.ts b/server/src/parser/analyzer/nodes/WhileStatementNode.ts index d07e5052..9c3689e6 100644 --- a/server/src/parser/analyzer/nodes/WhileStatementNode.ts +++ b/server/src/parser/analyzer/nodes/WhileStatementNode.ts @@ -22,29 +22,29 @@ export class WhileStatementNode extends Node { return undefined; } - public accept( + public async accept( find: FinderType, orphanNodes: Node[], parent?: Node, expression?: Node - ): Node { + ): Promise { this.setExpressionNode(expression); if (parent) { this.setParent(parent); } - find( - this.astNode.condition, - this.uri, - this.rootPath, - this.solFileIndex + await ( + await find( + this.astNode.condition, + this.uri, + this.rootPath, + this.solFileIndex + ) + ).accept(find, orphanNodes, this); + await ( + await find(this.astNode.body, this.uri, this.rootPath, this.solFileIndex) ).accept(find, orphanNodes, this); - find(this.astNode.body, this.uri, this.rootPath, this.solFileIndex).accept( - find, - orphanNodes, - this - ); parent?.addChild(this); diff --git a/server/src/parser/analyzer/resolver/index.ts b/server/src/parser/analyzer/resolver/index.ts deleted file mode 100644 index a222a78a..00000000 --- a/server/src/parser/analyzer/resolver/index.ts +++ /dev/null @@ -1,25 +0,0 @@ -import * as fs from "fs"; -import * as path from "path"; -import { Remapping } from "@common/types"; -import { toUnixStyle } from "../../../utils"; - -export function resolveDependency( - cwd: string, - originalPath: string, - pathRemappings: Remapping[] = [] -): string { - if (pathRemappings.length && !originalPath.startsWith(".")) { - for (const { from, to } of pathRemappings) { - if (originalPath.startsWith(from)) { - const remappedPath = path.join(to, originalPath.slice(from.length)); - return toUnixStyle(fs.realpathSync(remappedPath)); - } - } - } - - const resolvedPath = require.resolve(originalPath, { - paths: [fs.realpathSync(cwd)], - }); - - return toUnixStyle(fs.realpathSync(resolvedPath)); -} diff --git a/server/src/parser/common/event/index.ts b/server/src/parser/common/event/index.ts index e0f5a330..3df68183 100644 --- a/server/src/parser/common/event/index.ts +++ b/server/src/parser/common/event/index.ts @@ -1,5 +1,4 @@ export interface IndexFileData { - jobId: number; path: string; current: number; total: number; diff --git a/server/src/parser/common/types/index.ts b/server/src/parser/common/types/index.ts index 648cff24..681a19d4 100644 --- a/server/src/parser/common/types/index.ts +++ b/server/src/parser/common/types/index.ts @@ -81,8 +81,6 @@ import type { TypeDefinition, } from "@solidity-parser/parser/dist/src/ast-types"; -import { WorkspaceFolder } from "vscode-languageserver-protocol"; - import { Position as VSCodePosition, WorkspaceEdit, @@ -105,6 +103,7 @@ import { } from "vscode-languageserver-types"; import { TextDocument } from "vscode-languageserver-textdocument"; +import { Project } from "../../../frameworks/base/Project"; export { ASTNode, @@ -358,31 +357,8 @@ export enum SolFileState { ERRORED = "ERRORED", } -export enum ClientTrackingState { - UNTRACKED = "UNTRACKED", - TRACKED = "TRACKED", -} - -export type SolProjectType = "hardhat" | "none"; - -export interface Remapping { - from: string; - to: string; -} - -export interface ISolProject { - type: SolProjectType; - /** - * The basepath of the solidity project. - */ - basePath: string; - configPath: string; - workspaceFolder: WorkspaceFolder; - remappings?: Remapping[]; -} - export interface SolProjectMap { - [key: string]: ISolProject; + [key: string]: Project; } export interface ISolFileEntry { @@ -397,7 +373,7 @@ export interface ISolFileEntry { */ text: string | undefined; - project: ISolProject; + project: Project; /** * AST that we get from @solidity-parser/parser. @@ -415,8 +391,6 @@ export interface ISolFileEntry { */ status: SolFileState; - tracking: ClientTrackingState; - searcher: Searcher; /** @@ -428,18 +402,6 @@ export interface ISolFileEntry { * Has an analysis pass succeeded, allowing a user to operate on the ast. */ isAnalyzed(): boolean; - - /** - * Set the Solidity file as being the responsibility of the client - * but with no changes to the underlying file, - */ - track(): void; - - /** - * Set the Solidity file as no longer being the responsibility of the - * client. - */ - untrack(): void; } /** @@ -468,7 +430,7 @@ export type FinderType = ( uri: string, rootPath: string, documentsAnalyzer: SolFileIndexMap -) => Node; +) => Promise; /** * documentsAnalyzer Map { [uri: string]: DocumentAnalyzer } have all documentsAnalyzer class instances used for handle imports on first project start. @@ -576,13 +538,13 @@ export abstract class Node { baseASTNode: BaseASTNode | EmptyNodeType, uri: string, rootPath: string, - documentsAnalyzer: SolFileIndexMap, + solFileIndex: SolFileIndexMap, name: string | undefined ) { this.type = baseASTNode.type; this.uri = uri; this.rootPath = rootPath; - this.solFileIndex = documentsAnalyzer; + this.solFileIndex = solFileIndex; this.name = name; } @@ -715,7 +677,7 @@ export abstract class Node { orphanNodes: Node[], parent?: Node, expression?: Node - ): Node; + ): Promise; } export class EmptyNode extends Node { @@ -731,7 +693,7 @@ export class EmptyNode extends Node { this.astNode = emptyNode; } - public accept( + public async accept( // eslint-disable-next-line @typescript-eslint/no-unused-vars find: FinderType, // eslint-disable-next-line @typescript-eslint/no-unused-vars @@ -740,7 +702,7 @@ export class EmptyNode extends Node { parent?: Node, // eslint-disable-next-line @typescript-eslint/no-unused-vars expression?: Node - ): Node { + ): Promise { return this; } } diff --git a/server/src/queries/getOpenDocumentsInProject.ts b/server/src/queries/getOpenDocumentsInProject.ts index f5b2601e..2460dff6 100644 --- a/server/src/queries/getOpenDocumentsInProject.ts +++ b/server/src/queries/getOpenDocumentsInProject.ts @@ -1,37 +1,21 @@ -import { isHardhatProject } from "@analyzer/HardhatProject"; -import { - ClientTrackingState, - ISolFileEntry, - ISolProject, - TextDocument, -} from "@common/types"; +import { ISolFileEntry, TextDocument } from "@common/types"; +import { Project } from "../frameworks/base/Project"; import { ServerState } from "../types"; import { runningOnWindows } from "../utils/operatingSystem"; export function getOpenDocumentsInProject( serverState: ServerState, - project: ISolProject + project: Project ): TextDocument[] { - if (!isHardhatProject(project)) { - throw new Error("Cannot query for docs in non-hardhat project"); - } - const openSolFilesInProj = Object.values(serverState.solFileIndex).filter( - (solfile) => - solfile.tracking === ClientTrackingState.TRACKED && - isHardhatProject(solfile.project) && - solfile.project.basePath === project.basePath + (solfile) => solfile.project.basePath === project.basePath ); - const openDocs = openSolFilesInProj + const openDocuments = openSolFilesInProj .map((solFile) => lookupDocForSolFileEntry(serverState, solFile)) .filter((doc): doc is TextDocument => doc !== undefined); - if (openDocs.length < openSolFilesInProj.length) { - serverState.logger.info("Open document lookup has dropped files"); - } - - return openDocs; + return openDocuments; } function lookupDocForSolFileEntry( diff --git a/server/src/server.ts b/server/src/server.ts index 80ce5ce2..4eaecc8b 100644 --- a/server/src/server.ts +++ b/server/src/server.ts @@ -2,57 +2,33 @@ import { Connection } from "vscode-languageserver"; import { TextDocuments } from "vscode-languageserver/node"; import { TextDocument } from "vscode-languageserver-textdocument"; import { Logger } from "@utils/Logger"; -import { WorkspaceFileRetriever } from "@analyzer/WorkspaceFileRetriever"; +import { WorkspaceFileRetriever } from "@utils/WorkspaceFileRetriever"; import { onHover } from "@services/hover/onHover"; import { onInitialize } from "@services/initialization/onInitialize"; import { onInitialized } from "@services/initialization/onInitialized"; import { onSignatureHelp } from "@services/signaturehelp/onSignatureHelp"; import { onCompletion } from "@services/completion/onCompletion"; import { onCodeAction } from "@services/codeactions/onCodeAction"; -import { compilerProcessFactory } from "@services/validation/compilerProcessFactory"; import { onDefinition } from "@services/definition/onDefinition"; import { onTypeDefinition } from "@services/typeDefinition/onTypeDefinition"; import { onReferences } from "@services/references/onReferences"; import { onImplementation } from "@services/implementation/onImplementation"; import { onRename } from "@services/rename/onRename"; -import { RequestType } from "vscode-languageserver-protocol"; -import path = require("path"); -import { decodeUriAndRemoveFilePrefix, toUnixStyle } from "./utils"; -import { CompilerProcessFactory, ServerState } from "./types"; +import { ServerState } from "./types"; import { Telemetry } from "./telemetry/types"; import { attachDocumentHooks } from "./services/documents/attachDocumentHooks"; -export interface GetSolFileDetailsParams { - uri: string; -} -export type GetSolFileDetailsResponse = - | { found: false } - | { found: true; hardhat: false } - | { - found: true; - hardhat: true; - configPath: string; - configDisplayPath: string; - }; - -const GetSolFileDetails = new RequestType< - GetSolFileDetailsParams, - GetSolFileDetailsResponse, - void ->("solidity/getSolFileDetails"); - export default function setupServer( connection: Connection, - compProcessFactory: typeof compilerProcessFactory, workspaceFileRetriever: WorkspaceFileRetriever, telemetry: Telemetry, logger: Logger ): ServerState { const serverState = setupUninitializedServerState( connection, - compProcessFactory, telemetry, - logger + logger, + workspaceFileRetriever ); attachLanguageServerLifeCycleHooks(serverState, workspaceFileRetriever); @@ -65,25 +41,28 @@ export default function setupServer( function setupUninitializedServerState( connection: Connection, - compProcessFactory: CompilerProcessFactory, telemetry: Telemetry, - logger: Logger + logger: Logger, + workspaceFileRetriever: WorkspaceFileRetriever ) { const serverState: ServerState = { env: "production", hasWorkspaceFolderCapability: false, globalTelemetryEnabled: false, hardhatTelemetryEnabled: false, - indexJobCount: 0, - compProcessFactory, connection, - workspaceFolders: [], + indexedWorkspaceFolders: [], + workspaceFoldersToIndex: [], projects: {}, documents: new TextDocuments(TextDocument), solFileIndex: {}, - workerProcesses: {}, telemetry, logger, + solcVersions: [], + indexingFinished: false, + validationCount: 0, + lastValidationId: {}, + workspaceFileRetriever, }; return serverState; @@ -146,39 +125,4 @@ function attachCustomHooks(serverState: ServerState) { serverState.hardhatTelemetryEnabled = enabled; } ); - - serverState.connection.onRequest( - GetSolFileDetails, - (params: GetSolFileDetailsParams): GetSolFileDetailsResponse => { - try { - const solFil = - serverState.solFileIndex[decodeUriAndRemoveFilePrefix(params.uri)]; - - if (solFil === undefined) { - return { found: false }; - } - - if (solFil.project.type !== "hardhat") { - return { found: true, hardhat: false }; - } - - const displayConfigPath = toUnixStyle( - path.relative( - decodeUriAndRemoveFilePrefix(solFil.project.workspaceFolder.uri), - solFil.project.configPath - ) - ); - - return { - found: true, - hardhat: true, - configPath: solFil.project.configPath, - configDisplayPath: displayConfigPath, - }; - } catch (err) { - serverState.logger.error(err); - return { found: false }; - } - } - ); } diff --git a/server/src/services/completion/getImportPathCompletion.ts b/server/src/services/completion/getImportPathCompletion.ts index db4c745b..b7caeb00 100644 --- a/server/src/services/completion/getImportPathCompletion.ts +++ b/server/src/services/completion/getImportPathCompletion.ts @@ -5,7 +5,6 @@ import { CompletionItem, CompletionItemKind, ImportDirectiveNode, - ISolProject, VSCodePosition, } from "@common/types"; import { Logger } from "@utils/Logger"; @@ -19,6 +18,7 @@ export function getImportPathCompletion( { logger }: { logger: Logger } ): CompletionItem[] { const currentImport = node.astNode.path.replace("_;", ""); + const importPath = toUnixStyle(path.join(node.realUri, "..", currentImport)); let items: CompletionItem[]; @@ -32,10 +32,12 @@ export function getImportPathCompletion( logger ); - const indexNodeModuleFolders = - getIndexedNodeModuleFolderCompletions(projCtx); + const projectImportCompletions = projCtx.project.getImportCompletions( + position, + currentImport + ); - items = relativeImports.concat(indexNodeModuleFolders); + items = relativeImports.concat(projectImportCompletions); } else if (isRelativeImport(currentImport)) { items = getRelativeImportPathCompletions( position, @@ -45,7 +47,7 @@ export function getImportPathCompletion( logger ); } else { - items = getDirectImportPathCompletions(position, currentImport, projCtx); + items = projCtx.project.getImportCompletions(position, currentImport); } // Trigger auto-insertion of semicolon after import completion @@ -108,25 +110,7 @@ function getRelativeImportPathCompletions( ); } -function getIndexedNodeModuleFolderCompletions( - projCtx: ProjectContext -): CompletionItem[] { - if (projCtx.project.type === "none" || !projCtx.project.basePath) { - return []; - } - - const uniqueFolders = findNodeModulePackagesInIndex(projCtx); - - return uniqueFolders.map( - (p): CompletionItem => ({ - label: p, - kind: CompletionItemKind.Folder, - documentation: "Imports the package", - }) - ); -} - -function replaceFor( +export function replaceFor( filePath: string, position: VSCodePosition, currentImport: string @@ -142,31 +126,6 @@ function replaceFor( ); } -function getDirectImportPathCompletions( - position: VSCodePosition, - currentImport: string, - projCtx: ProjectContext -): CompletionItem[] { - const contractFilePaths = - currentImport.includes("/") || currentImport.includes(path.sep) - ? findNodeModulesContractFilesInIndex(projCtx, currentImport) - : findNodeModulePackagesInIndex(projCtx); - - return contractFilePaths.map((pathFromNodeModules): CompletionItem => { - const normalizedPath = normalizeSlashes(pathFromNodeModules); - - const completionItem: CompletionItem = { - label: normalizedPath, - textEdit: replaceFor(normalizedPath, position, currentImport), - - kind: CompletionItemKind.Module, - documentation: "Imports the package", - }; - - return completionItem; - }); -} - function getCompletionsFromFiles( position: VSCodePosition, currentImport: string, @@ -282,73 +241,3 @@ function convertFileToCompletion( return null; } } - -function findNodeModulePackagesInIndex({ - project, - solFileIndex, -}: ProjectContext): string[] { - const nodeModulePaths = resolvePotentialNodeModulesPathsFor(project); - - let modulePackages: string[] = []; - for (const nodeModulesPath of nodeModulePaths) { - const allNodeModulePaths = Object.keys(solFileIndex) - .filter((p) => p.startsWith(nodeModulesPath)) - .map((p) => p.replace(nodeModulesPath, "")); - - const uniqueFolders = Array.from( - new Set(allNodeModulePaths.map((p) => p.split("/")[1])) - ); - - modulePackages = modulePackages.concat(uniqueFolders); - } - - return Array.from(new Set(modulePackages)); -} - -function resolvePotentialNodeModulesPathsFor(project: ISolProject): string[] { - let current = project.basePath; - const nodeModulesPaths = []; - - while (current !== "/") { - const previous = current; - - const potentialPath = toUnixStyle(path.join(current, "node_modules")); - nodeModulesPaths.push(potentialPath); - - current = path.resolve(current, ".."); - - if (previous === current) { - break; - } - } - - return nodeModulesPaths; -} - -function findNodeModulesContractFilesInIndex( - { project, solFileIndex }: ProjectContext, - currentImport: string -): string[] { - const nodeModulesPaths = resolvePotentialNodeModulesPathsFor(project); - - let allContractFilePaths: string[] = []; - for (const nodeModulesPath of nodeModulesPaths) { - const basePath = toUnixStyle(path.join(nodeModulesPath, path.sep)); - - const basePathWithCurrentImport = toUnixStyle( - path.join(basePath, currentImport) - ); - - const contractFilePaths = Object.keys(solFileIndex) - .filter((fullPath) => fullPath.startsWith(basePathWithCurrentImport)) - .map((fullPath) => fullPath.replace(basePath, "")); - - allContractFilePaths = allContractFilePaths.concat(contractFilePaths); - } - - return allContractFilePaths; -} - -function normalizeSlashes(p: string) { - return path.sep === "\\" ? p.replace(/\\/g, "/") : p; -} diff --git a/server/src/services/completion/onCompletion.ts b/server/src/services/completion/onCompletion.ts index 8a0d2d56..8a756b49 100644 --- a/server/src/services/completion/onCompletion.ts +++ b/server/src/services/completion/onCompletion.ts @@ -23,8 +23,6 @@ import { CompletionParams, } from "vscode-languageserver/node"; import { applyEditToDocumentAnalyzer } from "@utils/applyEditToDocumentAnalyzer"; -import { findProjectFor } from "@utils/findProjectFor"; -import { decodeUriAndRemoveFilePrefix } from "@utils/index"; import { ServerState } from "../../types"; import { ProjectContext } from "./types"; import { getImportPathCompletion } from "./getImportPathCompletion"; @@ -32,14 +30,14 @@ import { globalVariables, defaultCompletion } from "./defaultCompletion"; import { arrayCompletions } from "./arrayCompletions"; export const onCompletion = (serverState: ServerState) => { - return (params: CompletionParams): CompletionList | null => { + return async (params: CompletionParams): Promise => { const { logger } = serverState; logger.trace("onCompletion"); - return serverState.telemetry.trackTimingSync("onCompletion", () => { + return serverState.telemetry.trackTiming("onCompletion", async () => { const { found, errorMessage, documentAnalyzer, document } = - applyEditToDocumentAnalyzer( + await applyEditToDocumentAnalyzer( serverState, params.textDocument.uri, (doc) => resolveChangedDocText(params, doc) @@ -53,10 +51,7 @@ export const onCompletion = (serverState: ServerState) => { return { status: "failed_precondition", result: null }; } - const project = findProjectFor( - serverState, - decodeUriAndRemoveFilePrefix(document.uri) - ); + const project = documentAnalyzer.project; const projCtx: ProjectContext = { project, diff --git a/server/src/services/completion/types.ts b/server/src/services/completion/types.ts index cfa1f943..e989d0dd 100644 --- a/server/src/services/completion/types.ts +++ b/server/src/services/completion/types.ts @@ -1,6 +1,7 @@ -import { SolFileIndexMap, ISolProject } from "@common/types"; +import { SolFileIndexMap } from "@common/types"; +import { Project } from "../../frameworks/base/Project"; export interface ProjectContext { - project: ISolProject; + project: Project; solFileIndex: SolFileIndexMap; } diff --git a/server/src/services/documents/attachDocumentHooks.ts b/server/src/services/documents/attachDocumentHooks.ts index 945f9699..a483dbf1 100644 --- a/server/src/services/documents/attachDocumentHooks.ts +++ b/server/src/services/documents/attachDocumentHooks.ts @@ -2,8 +2,6 @@ import { ServerState } from "../../types"; import { onDidChangeWatchedFiles } from "./onDidChangeWatchedFiles"; import { onDidChangeContent } from "./onDidChangeContent"; import { onDidOpen } from "./onDidOpen"; -import { onDidClose } from "./onDidClose"; -import { onDidSave } from "./onDidSave"; /** * Establish a sync between the client and the `serverState.documents` @@ -17,8 +15,6 @@ export function attachDocumentHooks(serverState: ServerState) { // responsibility for the file (onOpen/onSave/onClose) as // opposed to files where the cannonical version is on disk serverState.documents.onDidOpen(onDidOpen(serverState)); - serverState.documents.onDidClose(onDidClose(serverState)); - serverState.documents.onDidSave(onDidSave(serverState)); // The content of a text document has changed. This event is emitted // when the text document first opened or when its content has changed. diff --git a/server/src/services/documents/onDidChangeContent.ts b/server/src/services/documents/onDidChangeContent.ts index 9fa3d5a3..effd6e3a 100644 --- a/server/src/services/documents/onDidChangeContent.ts +++ b/server/src/services/documents/onDidChangeContent.ts @@ -30,7 +30,7 @@ export function onDidChangeContent(serverState: ServerState) { analyse: { action: analyse, changeActions: {}, - wait: 500, + wait: 240, }, validate: { action: validate, diff --git a/server/src/services/documents/onDidChangeWatchedFiles.ts b/server/src/services/documents/onDidChangeWatchedFiles.ts index 261ad58e..fb5c0dbc 100644 --- a/server/src/services/documents/onDidChangeWatchedFiles.ts +++ b/server/src/services/documents/onDidChangeWatchedFiles.ts @@ -1,35 +1,34 @@ -import { decodeUriAndRemoveFilePrefix } from "@utils/index"; -import { DidChangeWatchedFilesParams } from "vscode-languageserver"; +import { + DidChangeWatchedFilesParams, + FileChangeType, +} from "vscode-languageserver"; import { ServerState } from "../../types"; -import { restartWorker } from "../validation/restartWorker"; -import { invalidateWorkerPreprocessCache } from "../validation/invalidateWorkerPreprocessCache"; +import { decodeUriAndRemoveFilePrefix } from "../../utils"; +import { indexSolidityFiles } from "../initialization/indexWorkspaceFolders"; export function onDidChangeWatchedFiles(serverState: ServerState) { - return async ({ - changes, - }: DidChangeWatchedFilesParams): Promise => { - const results = []; + return async (params: DidChangeWatchedFilesParams) => { + // Normalize file uris + const normalizedParams = { + changes: params.changes.map((change) => ({ + ...change, + uri: decodeUriAndRemoveFilePrefix(change.uri), + })), + }; - for (const change of changes) { - const internalUri = decodeUriAndRemoveFilePrefix(change.uri); - - if (internalUri.endsWith(".sol")) { - const result = await invalidateWorkerPreprocessCache( - serverState, - internalUri - ); - - results.push(result ?? false); - } else if ( - internalUri.endsWith("hardhat.config.ts") || - internalUri.endsWith("hardhat.config.js") + // Index new solidity files + for (const change of normalizedParams.changes) { + if ( + change.uri.endsWith(".sol") && + change.type === FileChangeType.Created ) { - const result = await restartWorker(serverState, internalUri); - - results.push(result ?? false); + await indexSolidityFiles(serverState, [change.uri]); } } - return results; + // Notify all registered projects of the file changes + for (const project of Object.values(serverState.projects)) { + await project.onWatchedFilesChanges(normalizedParams); + } }; } diff --git a/server/src/services/documents/onDidClose.ts b/server/src/services/documents/onDidClose.ts deleted file mode 100644 index f61884f2..00000000 --- a/server/src/services/documents/onDidClose.ts +++ /dev/null @@ -1,24 +0,0 @@ -import { TextDocumentChangeEvent } from "vscode-languageserver"; -import { TextDocument } from "vscode-languageserver-textdocument"; -import { decodeUriAndRemoveFilePrefix } from "@utils/index"; -import { getOrInitialiseSolFileEntry } from "@utils/getOrInitialiseSolFileEntry"; -import { ServerState } from "../../types"; - -/** - * Record in the index that the client has released the solidity file - * from its responsibility (the version on disk should be taken as canonical). - */ -export function onDidClose(serverState: ServerState) { - return (change: TextDocumentChangeEvent) => { - if (change.document.languageId !== "solidity") { - return; - } - - serverState.logger.trace("onDidClose"); - - const uri = decodeUriAndRemoveFilePrefix(change.document.uri); - const solFileEntry = getOrInitialiseSolFileEntry(serverState, uri); - - solFileEntry.untrack(); - }; -} diff --git a/server/src/services/documents/onDidOpen.ts b/server/src/services/documents/onDidOpen.ts index a0adb6ca..1645daf6 100644 --- a/server/src/services/documents/onDidOpen.ts +++ b/server/src/services/documents/onDidOpen.ts @@ -6,7 +6,7 @@ import { analyzeSolFile } from "@analyzer/analyzeSolFile"; import { ServerState } from "../../types"; export function onDidOpen(serverState: ServerState) { - return (change: TextDocumentChangeEvent) => { + return async (change: TextDocumentChangeEvent) => { if (change.document.languageId !== "solidity") { return; } @@ -18,11 +18,7 @@ export function onDidOpen(serverState: ServerState) { const solFileEntry = getOrInitialiseSolFileEntry(serverState, uri); - // Mark the file as being tracked by the client, but without - // known changes from the file system version - solFileEntry.track(); - // Ensure it is analysed - analyzeSolFile(serverState, solFileEntry, solFileText); + await analyzeSolFile(serverState, solFileEntry, solFileText); }; } diff --git a/server/src/services/documents/onDidSave.ts b/server/src/services/documents/onDidSave.ts deleted file mode 100644 index 5eff6c8b..00000000 --- a/server/src/services/documents/onDidSave.ts +++ /dev/null @@ -1,20 +0,0 @@ -import { TextDocumentChangeEvent } from "vscode-languageserver"; -import { TextDocument } from "vscode-languageserver-textdocument"; -import { decodeUriAndRemoveFilePrefix } from "@utils/index"; -import { getOrInitialiseSolFileEntry } from "@utils/getOrInitialiseSolFileEntry"; -import { ServerState } from "../../types"; - -export function onDidSave(serverState: ServerState) { - return (change: TextDocumentChangeEvent) => { - if (change.document.languageId !== "solidity") { - return; - } - - serverState.logger.trace("onDidSave"); - - const uri = decodeUriAndRemoveFilePrefix(change.document.uri); - const solFileEntry = getOrInitialiseSolFileEntry(serverState, uri); - - solFileEntry.track(); - }; -} diff --git a/server/src/services/initialization/indexWorkspaceFolders.ts b/server/src/services/initialization/indexWorkspaceFolders.ts index 6b6baa47..7eadab78 100644 --- a/server/src/services/initialization/indexWorkspaceFolders.ts +++ b/server/src/services/initialization/indexWorkspaceFolders.ts @@ -1,217 +1,119 @@ -import * as path from "path"; -import { IndexFileData } from "@common/event"; -import { Logger } from "@utils/Logger"; import { WorkspaceFolder } from "vscode-languageserver-protocol"; -import { Connection } from "vscode-languageserver"; -import { WorkspaceFileRetriever } from "@analyzer/WorkspaceFileRetriever"; +import { WorkspaceFileRetriever } from "@utils/WorkspaceFileRetriever"; import { SolFileEntry } from "@analyzer/SolFileEntry"; -import { Remapping, SolFileIndexMap, SolProjectMap } from "@common/types"; -import { getOrInitialiseSolFileEntry } from "@utils/getOrInitialiseSolFileEntry"; -import { analyzeSolFile } from "@analyzer/analyzeSolFile"; -import { HardhatProject } from "@analyzer/HardhatProject"; -import { findProjectFor } from "@utils/findProjectFor"; +import _ from "lodash"; +import path from "path"; import { decodeUriAndRemoveFilePrefix, toUnixStyle } from "../../utils/index"; +import { ServerState } from "../../types"; +import { HardhatIndexer } from "../../frameworks/Hardhat/HardhatIndexer"; +import { Project } from "../../frameworks/base/Project"; +import { ProjectlessProject } from "../../frameworks/Projectless/ProjectlessProject"; +import { Logger } from "../../utils/Logger"; +import { analyzeSolFile } from "../../parser/analyzer/analyzeSolFile"; +import { getOrInitialiseSolFileEntry } from "../../utils/getOrInitialiseSolFileEntry"; +import { FoundryIndexer } from "../../frameworks/Foundry/FoundryIndexer"; import { resolveTopLevelWorkspaceFolders } from "./resolveTopLevelWorkspaceFolders"; -export interface IndexWorkspaceFoldersContext { - indexJobCount: number; - connection: Connection; - solFileIndex: SolFileIndexMap; - workspaceFolders: WorkspaceFolder[]; - projects: SolProjectMap; - logger: Logger; -} - export async function indexWorkspaceFolders( - indexWorkspaceFoldersContext: IndexWorkspaceFoldersContext, + serverState: ServerState, workspaceFileRetriever: WorkspaceFileRetriever, workspaceFolders: WorkspaceFolder[] ) { - const { logger } = indexWorkspaceFoldersContext; + const logger = _.clone(serverState.logger); + logger.tag = "indexing"; if (workspaceFolders.some((wf) => wf.uri.includes("\\"))) { throw new Error("Unexpect windows style path"); } - indexWorkspaceFoldersContext.indexJobCount++; - const indexJobId = indexWorkspaceFoldersContext.indexJobCount; - - const indexJobStartTime = new Date(); - logger.info(`[indexing:${indexJobId}] Starting indexing job ...`); - - notifyStartIndexing(indexJobId, indexWorkspaceFoldersContext); - const topLevelWorkspaceFolders = resolveTopLevelWorkspaceFolders( - indexWorkspaceFoldersContext, + serverState, workspaceFolders ); + // workspace change events are received duplicated, so return early if there's nothing new to index if (topLevelWorkspaceFolders.length === 0) { - notifyNoOpIndexing( - indexJobId, - indexWorkspaceFoldersContext, - `[indexing:${indexJobId}] Workspace folders already indexed` - ); - return; } - logger.info(`[indexing:${indexJobId}] Workspace folders`); - for (const workspaceFolder of topLevelWorkspaceFolders) { - logger.info(`[indexing:${indexJobId}] ${workspaceFolder.name}`); - } + // Store workspace folders to mark them as indexed + serverState.indexedWorkspaceFolders.push(...topLevelWorkspaceFolders); - for (const workspaceFolder of topLevelWorkspaceFolders) { - try { - await scanForHardhatProjectsAndAppend( - indexJobId, - workspaceFolder, - indexWorkspaceFoldersContext.projects, - workspaceFileRetriever, - logger - ); - } catch (err) { - logger.error(err); - } - } - - const solFiles = await scanForSolFiles( - indexJobId, - indexWorkspaceFoldersContext, - workspaceFileRetriever, - topLevelWorkspaceFolders - ); - - try { - await analyzeSolFiles( - indexJobId, - indexWorkspaceFoldersContext, - workspaceFileRetriever, - indexWorkspaceFoldersContext.projects, - solFiles - ); - } catch (err) { - logger.error(err); - } - - for (const workspaceFolder of topLevelWorkspaceFolders) { - indexWorkspaceFoldersContext.workspaceFolders.push(workspaceFolder); - } - - logger.info( - `[indexing:${indexJobId}] Indexing complete (${ - (new Date().getTime() - indexJobStartTime.getTime()) / 1000 - }s)` - ); -} - -async function loadAndParseRemappings( - basePath: string, - workspaceFileRetriever: WorkspaceFileRetriever -): Promise { - const remappingsPath = path.join(basePath, "remappings.txt"); - if (await workspaceFileRetriever.fileExists(remappingsPath)) { - const rawRemappings = await workspaceFileRetriever.readFile(remappingsPath); - return parseRemappings(rawRemappings, basePath); + if (topLevelWorkspaceFolders.length === 0) { + return; } - return []; -} - -function parseRemappings(rawRemappings: string, basePath: string) { - const lines = rawRemappings.trim().split("\n"); - const remappings: Remapping[] = []; - - for (const line of lines) { - const lineTokens = line.split("=", 2); - - if ( - lineTokens.length !== 2 || - lineTokens[0].length === 0 || - lineTokens[1].length === 0 - ) { - continue; + notifyStartIndexing(serverState); + + // Scan for projects + const indexers = [ + new HardhatIndexer(serverState, workspaceFileRetriever), + new FoundryIndexer(serverState, workspaceFileRetriever), + ]; + const foundProjects: Project[] = []; + await logger.trackTime("Indexing projects", async () => { + for (const indexer of indexers) { + for (const wsFolder of topLevelWorkspaceFolders) { + foundProjects.push(...(await indexer.index(wsFolder))); + } } + }); - const [from, to] = lineTokens; - - remappings.push({ from, to: path.join(basePath, to) }); + logger.info(`Found projects:`); + for (const project of foundProjects) { + logger.info(`- Type: ${project.frameworkName()}`); + logger.info(` Base path: ${project.basePath}`); + logger.info(` Config file: ${project.configPath}`); } - return remappings; -} - -async function scanForHardhatProjectsAndAppend( - indexJobId: number, - workspaceFolder: WorkspaceFolder, - projects: SolProjectMap, - workspaceFileRetriever: WorkspaceFileRetriever, - logger: Logger -): Promise { - const scanningStartTime = new Date(); - logger.info( - `[indexing:${indexJobId}] Scanning ${workspaceFolder.name} for hardhat projects` - ); - - const uri = decodeUriAndRemoveFilePrefix(workspaceFolder.uri); - const hardhatConfigFiles = await workspaceFileRetriever.findFiles( - uri, - "**/hardhat.config.{ts,js}", - ["**/node_modules/**"] - ); + // Append to global project map if they are not already indexed + await logger.trackTime("Initializing projects", async () => { + await Promise.all( + foundProjects.map(async (foundProject) => { + if (foundProject.id() in serverState.projects) { + return; + } - const foundProjects = await Promise.all( - hardhatConfigFiles.map(async (hhcf) => { - const basePath = path.dirname(decodeUriAndRemoveFilePrefix(hhcf)); - const parsedRemappings = await loadAndParseRemappings( - basePath, - workspaceFileRetriever - ); + serverState.projects[foundProject.id()] = foundProject; + logger.info(`Initializing ${foundProject.id()}`); + await foundProject.initialize(); + logger.info(`Done ${foundProject.id()}`); + }) + ); + }); - return new HardhatProject( - basePath, - hhcf, - workspaceFolder, - parsedRemappings - ); - }) - ); + // Find all sol files + let solFileUris: string[]; + await logger.trackTime("Indexing solidity files", async () => { + solFileUris = await scanForSolFiles( + logger, + workspaceFileRetriever, + topLevelWorkspaceFolders + ); - for (const project of foundProjects) { - if (project.basePath in project) { - continue; - } + // Index sol files, and associate the matching project + await indexSolidityFiles(serverState, solFileUris); + }); - projects[project.basePath] = project; + // Store workspace folders to mark them as indexed + for (const workspaceFolder of topLevelWorkspaceFolders) { + serverState.indexedWorkspaceFolders.push(workspaceFolder); } - if (foundProjects.length === 0) { - logger.info( - `[indexing:${indexJobId}] No hardhat projects found in ${workspaceFolder.name}` - ); - } else { - logger.info( - `[indexing:${indexJobId}] Hardhat projects found in ${ - workspaceFolder.name - } (${(new Date().getTime() - scanningStartTime.getTime()) / 1000}s):` - ); + // Analyze files + await logger.trackTime("Analyzing solidity files", async () => { + await analyzeSolFiles(serverState, logger, solFileUris); + }); - for (const foundProject of foundProjects) { - logger.info(`[indexing:${indexJobId}] ${foundProject.basePath}`); - } - } + notifyEndIndexing(serverState); } async function scanForSolFiles( - indexJobId: number, - { logger }: IndexWorkspaceFoldersContext, + logger: Logger, workspaceFileRetriever: WorkspaceFileRetriever, workspaceFolders: WorkspaceFolder[] ): Promise { - const solFileScanStart = new Date(); - logger.info( - `[indexing:${indexJobId}] Scanning workspace folders for sol files` - ); + logger.info(`Scanning workspace folders for sol files`); const batches: string[][] = []; @@ -234,129 +136,101 @@ async function scanForSolFiles( const solFileUris = batches.reduce((acc, batch) => acc.concat(batch), []); - logger.info( - `[indexing:${indexJobId}] Scan complete, ${ - solFileUris.length - } sol files found (${ - (new Date().getTime() - solFileScanStart.getTime()) / 1000 - }s)` - ); + logger.info(`Scan complete, ${solFileUris.length} sol files found`); return solFileUris; } +export async function indexSolidityFiles( + serverState: ServerState, + fileUris: string[] +) { + const indexedProjects = Object.values(serverState.projects); + + for (const fileUri of fileUris) { + let project: Project = new ProjectlessProject( + serverState, + path.dirname(fileUri) + ); + + for (const indexedProject of indexedProjects) { + try { + const belongs = await indexedProject.fileBelongs(fileUri); + if (belongs && indexedProject.priority > project.priority) { + project = indexedProject; + } + } catch (error) { + serverState.logger.trace(`Error on fileBelongs: ${error}`); + continue; + } + } + + serverState.logger.trace(`Associating ${project.id()} to ${fileUri}`); + + const docText = await serverState.workspaceFileRetriever.readFile(fileUri); + serverState.solFileIndex[fileUri] = SolFileEntry.createLoadedEntry( + fileUri, + project, + docText + ); + + notifyFileIndexed(serverState, fileUri, project); + } +} + +function notifyStartIndexing(serverState: ServerState) { + serverState.connection.sendNotification("custom/indexing-start"); +} + +function notifyEndIndexing(serverState: ServerState) { + serverState.connection.sendNotification("custom/indexing-end"); +} + +function notifyFileIndexed( + serverState: ServerState, + uri: string, + project: Project +) { + serverState.connection.sendNotification("custom/file-indexed", { + uri, + project: { + configPath: project.configPath, + frameworkName: project.frameworkName(), + }, + }); +} + async function analyzeSolFiles( - indexJobId: number, - indexWorkspaceFoldersContext: IndexWorkspaceFoldersContext, - workspaceFileRetriever: WorkspaceFileRetriever, - projects: SolProjectMap, + serverState: ServerState, + logger: Logger, solFileUris: string[] ) { - const { connection, solFileIndex, logger } = indexWorkspaceFoldersContext; - const analysisStart = new Date(); + const { solFileIndex } = serverState; try { - logger.info(`[indexing:${indexJobId}] Analysing Sol files`); + // We will initialize all DocumentAnalizers first, because when we analyze documents we enter to their imports and + // if they are not analyzed we analyze them, in order to be able to analyze imports we need to have DocumentAnalizer and + // therefore we initiate everything first. The isAnalyzed serves to check if the document was analyzed so we don't analyze the document twice. + for (let i = 0; i < solFileUris.length; i++) { + const documentUri = solFileUris[i]; - // Init all documentAnalyzers - for (const solFileUri of solFileUris) { try { - const docText = await workspaceFileRetriever.readFile(solFileUri); - const project = findProjectFor({ projects }, solFileUri); + logger.trace(`Analyzing file ${i}/${solFileUris.length}`); - solFileIndex[solFileUri] = SolFileEntry.createLoadedUntrackedEntry( - solFileUri, - project, - docText.toString() + const solFileEntry = getOrInitialiseSolFileEntry( + serverState, + documentUri ); - } catch (err) { - logger.error(err); - } - } - if (solFileUris.length > 0) { - // We will initialize all DocumentAnalizers first, because when we analyze documents we enter to their imports and - // if they are not analyzed we analyze them, in order to be able to analyze imports we need to have DocumentAnalizer and - // therefore we initiate everything first. The isAnalyzed serves to check if the document was analyzed so we don't analyze the document twice. - for (let i = 0; i < solFileUris.length; i++) { - const documentUri = solFileUris[i]; - - try { - const data: IndexFileData = { - jobId: indexJobId, - path: documentUri, - current: i + 1, - total: solFileUris.length, - }; - - connection.sendNotification("custom/indexing-file", data); - - logger.trace(`Indexing file ${i}/${solFileUris.length}`, data); - - const solFileEntry = getOrInitialiseSolFileEntry( - { projects, solFileIndex }, - documentUri - ); - - if (!solFileEntry.isAnalyzed()) { - analyzeSolFile({ solFileIndex }, solFileEntry); - } - } catch (err) { - logger.error(err); - logger.trace("Analysis of file failed", { documentUri }); + if (!solFileEntry.isAnalyzed()) { + await analyzeSolFile({ solFileIndex }, solFileEntry); } + } catch (err) { + logger.error(err); + logger.trace("Analysis of file failed", { documentUri }); } - } else { - notifyNoOpIndexing( - indexJobId, - indexWorkspaceFoldersContext, - "No files to index" - ); } } catch (err) { logger.error(err); - } finally { - logger.info( - `[indexing:${indexJobId}] Analysis complete (${ - (new Date().getTime() - analysisStart.getTime()) / 1000 - }s)` - ); } } - -function notifyNoOpIndexing( - indexJobId: number, - indexWorkspaceFoldersContext: IndexWorkspaceFoldersContext, - logMessage: string -) { - const data: IndexFileData = { - jobId: indexJobId, - path: "", - current: 0, - total: 0, - }; - - indexWorkspaceFoldersContext.connection.sendNotification( - "custom/indexing-file", - data - ); - - indexWorkspaceFoldersContext.logger.trace(logMessage, data); -} - -function notifyStartIndexing( - indexJobId: number, - indexWorkspaceFoldersContext: IndexWorkspaceFoldersContext -) { - const data: IndexFileData = { - jobId: indexJobId, - path: "", - current: 0, - total: 0, - }; - - indexWorkspaceFoldersContext.connection.sendNotification( - "custom/indexing-start", - data - ); -} diff --git a/server/src/services/initialization/onInitialize.ts b/server/src/services/initialization/onInitialize.ts index ea3a49c1..1cc7b107 100644 --- a/server/src/services/initialization/onInitialize.ts +++ b/server/src/services/initialization/onInitialize.ts @@ -4,11 +4,12 @@ import { InitializeResult, } from "vscode-languageserver/node"; import { ServerState } from "../../types"; +import { updateAvailableSolcVersions } from "./updateAvailableSolcVersions"; export const onInitialize = (serverState: ServerState) => { const { logger } = serverState; - return (params: InitializeParams) => { + return async (params: InitializeParams) => { logger.trace("onInitialize"); logger.info("Language server starting"); @@ -30,6 +31,11 @@ export const onInitialize = (serverState: ServerState) => { extensionVersion, }); + // fetch available solidity versions + await updateAvailableSolcVersions(serverState); + + logger.info("Language server ready"); + const result: InitializeResult = { serverInfo: { name: "Hardhat Language Server", @@ -94,7 +100,7 @@ function updateServerStateFromParams( serverState.hardhatTelemetryEnabled = params.initializationOptions?.hardhatTelemetryEnabled ?? false; - serverState.workspaceFolders = params.workspaceFolders ?? []; + serverState.workspaceFoldersToIndex = params.workspaceFolders ?? []; serverState.hasWorkspaceFolderCapability = params.capabilities.workspace !== undefined && @@ -127,11 +133,11 @@ function logInitializationInfo( ); } - if (serverState.workspaceFolders.length === 0) { + if (serverState.workspaceFoldersToIndex.length === 0) { logger.info(` Workspace Folders: none`); } else { logger.info(` Workspace Folders:`); - for (const folder of serverState.workspaceFolders) { + for (const folder of serverState.workspaceFoldersToIndex) { logger.info(` ${folder.name} (${folder.uri})`); } } diff --git a/server/src/services/initialization/onInitialized.ts b/server/src/services/initialization/onInitialized.ts index c986b67f..74881cc1 100644 --- a/server/src/services/initialization/onInitialized.ts +++ b/server/src/services/initialization/onInitialized.ts @@ -1,5 +1,4 @@ -import { isHardhatProject } from "@analyzer/HardhatProject"; -import { WorkspaceFileRetriever } from "@analyzer/WorkspaceFileRetriever"; +import { WorkspaceFileRetriever } from "@utils/WorkspaceFileRetriever"; import { ServerState } from "../../types"; import { indexWorkspaceFolders } from "./indexWorkspaceFolders"; import { removeWorkspaceFolders } from "./removeWorkspaceFolders"; @@ -10,15 +9,12 @@ export const onInitialized = ( ) => { const { logger } = serverState; + // set up listener for workspace folder changes return async () => { logger.trace("onInitialized"); if (serverState.hasWorkspaceFolderCapability) { serverState.connection.workspace.onDidChangeWorkspaceFolders((e) => { - logger.trace( - `Workspace folder change event received. ${e.added} ${e.removed}` - ); - if (e.added.length > 0) { return indexWorkspaceFolders( serverState, @@ -33,45 +29,16 @@ export const onInitialized = ( }); } + // index folders await serverState.telemetry.trackTiming("indexing", async () => { await indexWorkspaceFolders( - { ...serverState, workspaceFolders: [] }, + serverState, workspaceFileRetriever, - serverState.workspaceFolders + serverState.workspaceFoldersToIndex ); + serverState.indexingFinished = true; return { status: "ok", result: null }; }); - - serverState.telemetry.trackTimingSync("worker setup", () => { - setupWorkerProcesses(serverState); - - return { status: "ok", result: null }; - }); - - logger.info("Language server ready"); }; }; - -function setupWorkerProcesses(serverState: ServerState) { - const workerProcesses = serverState.workerProcesses; - for (const project of Object.values(serverState.projects)) { - if (project.basePath in workerProcesses) { - continue; - } - - if (!isHardhatProject(project)) { - continue; - } - - const workerProcess = serverState.compProcessFactory( - project, - serverState.logger, - serverState.connection - ); - - workerProcesses[project.basePath] = workerProcess; - - workerProcess.init(); - } -} diff --git a/server/src/services/initialization/removeWorkspaceFolders.ts b/server/src/services/initialization/removeWorkspaceFolders.ts index 1ade9429..3d6d8564 100644 --- a/server/src/services/initialization/removeWorkspaceFolders.ts +++ b/server/src/services/initialization/removeWorkspaceFolders.ts @@ -13,7 +13,8 @@ export function removeWorkspaceFolders( // projects and files that are still part // of a parent workspace). This is punted on // for the moment. - serverState.workspaceFolders = serverState.workspaceFolders.filter( - (wf) => !removed.some((r) => r.uri === wf.uri) - ); + serverState.indexedWorkspaceFolders = + serverState.indexedWorkspaceFolders.filter( + (wf) => !removed.some((r) => r.uri === wf.uri) + ); } diff --git a/server/src/services/initialization/resolveTopLevelWorkspaceFolders.ts b/server/src/services/initialization/resolveTopLevelWorkspaceFolders.ts index 566344b7..19cdc59f 100644 --- a/server/src/services/initialization/resolveTopLevelWorkspaceFolders.ts +++ b/server/src/services/initialization/resolveTopLevelWorkspaceFolders.ts @@ -1,11 +1,12 @@ import { WorkspaceFolder } from "vscode-languageserver-protocol"; +import { ServerState } from "../../types"; /** * Workspaces can be nested, we are only concerned with new folders * that are top level for indexing. */ export function resolveTopLevelWorkspaceFolders( - { workspaceFolders }: { workspaceFolders: WorkspaceFolder[] }, + serverState: ServerState, addedWorkspaceFolders: WorkspaceFolder[] ): WorkspaceFolder[] { const addedUris = addedWorkspaceFolders.map((awf) => awf.uri); @@ -17,7 +18,7 @@ export function resolveTopLevelWorkspaceFolders( const notAlreadyProcessed = rootAddedWorkspaces.filter( (awf) => - !workspaceFolders.some( + !serverState.indexedWorkspaceFolders.some( (wf) => awf.uri === wf.uri || awf.uri.startsWith(wf.uri) ) ); diff --git a/server/src/services/initialization/updateAvailableSolcVersions.ts b/server/src/services/initialization/updateAvailableSolcVersions.ts new file mode 100644 index 00000000..0fec0da2 --- /dev/null +++ b/server/src/services/initialization/updateAvailableSolcVersions.ts @@ -0,0 +1,113 @@ +import _ from "lodash"; +import got from "got"; +import { ServerState } from "../../types"; + +const availableVersions = [ + "0.3.6", + "0.4.0", + "0.4.1", + "0.4.2", + "0.4.3", + "0.4.4", + "0.4.5", + "0.4.6", + "0.4.7", + "0.4.8", + "0.4.9", + "0.4.10", + "0.4.11", + "0.4.12", + "0.4.13", + "0.4.14", + "0.4.15", + "0.4.16", + "0.4.17", + "0.4.18", + "0.4.19", + "0.4.20", + "0.4.21", + "0.4.22", + "0.4.23", + "0.4.24", + "0.4.25", + "0.4.26", + "0.5.0", + "0.5.1", + "0.5.2", + "0.5.3", + "0.5.4", + "0.5.5", + "0.5.6", + "0.5.7", + "0.5.8", + "0.5.9", + "0.5.10", + "0.5.11", + "0.5.12", + "0.5.13", + "0.5.14", + "0.5.15", + "0.5.16", + "0.5.17", + "0.6.0", + "0.6.1", + "0.6.2", + "0.6.3", + "0.6.4", + "0.6.5", + "0.6.6", + "0.6.7", + "0.6.8", + "0.6.9", + "0.6.10", + "0.6.11", + "0.6.12", + "0.7.0", + "0.7.1", + "0.7.2", + "0.7.3", + "0.7.4", + "0.7.5", + "0.7.6", + "0.8.0", + "0.8.1", + "0.8.2", + "0.8.3", + "0.8.4", + "0.8.5", + "0.8.6", + "0.8.7", + "0.8.8", + "0.8.9", + "0.8.10", + "0.8.11", + "0.8.12", + "0.8.13", + "0.8.14", + "0.8.15", + "0.8.16", +]; + +export async function updateAvailableSolcVersions(state: ServerState) { + const latestVersions = await fetchLatestVersions(state); + + state.solcVersions = _.union(availableVersions, latestVersions); +} + +interface VersionsResponse { + builds: Array<{ version: string }>; +} +async function fetchLatestVersions(state: ServerState) { + try { + const data: VersionsResponse = await got + .get("https://binaries.soliditylang.org/wasm/list.json", { + timeout: 2000, + }) + .json(); + + return _.map(data.builds, "version"); + } catch (error) { + state.telemetry.captureException(error); + return []; + } +} diff --git a/server/src/services/rename/onRename.ts b/server/src/services/rename/onRename.ts index 0886d216..c2be4e70 100644 --- a/server/src/services/rename/onRename.ts +++ b/server/src/services/rename/onRename.ts @@ -11,12 +11,8 @@ import { import { getParserPositionFromVSCodePosition, getRange } from "@common/utils"; import { findReferencesFor } from "@utils/findReferencesFor"; -import { invalidateWorkerPreprocessCache } from "@services/validation/invalidateWorkerPreprocessCache"; import { ServerState } from "../../types"; -import { - convertHardhatUriToVscodeUri, - decodeUriAndRemoveFilePrefix, -} from "../../utils/index"; +import { convertHardhatUriToVscodeUri } from "../../utils/index"; export const onRename = (serverState: ServerState) => { return async (params: RenameParams) => { @@ -25,19 +21,7 @@ export const onRename = (serverState: ServerState) => { serverState, "onRenameRequest", params.textDocument.uri, - (documentAnalyzer) => - rename(documentAnalyzer, params.position, params.newName) - ); - - // Renames are multifile, if the change to the current - // editor goes to validation before any file changes - // are recorded, preprocessing won't recognise that - // the cache is no longer valid, hence we clear it - // before returning - await invalidateWorkerPreprocessCache( - serverState, - decodeUriAndRemoveFilePrefix(params.textDocument.uri), - true + (solFileEntry) => rename(solFileEntry, params.position, params.newName) ); return workspaceEdit; @@ -48,14 +32,16 @@ export const onRename = (serverState: ServerState) => { }; function rename( - documentAnalyzer: ISolFileEntry, + solFileEntry: ISolFileEntry, position: VSCodePosition, newName: string ): WorkspaceEdit { - const originRenameNode = documentAnalyzer.searcher.findRenameNodeByPosition( - documentAnalyzer.uri, + solFileEntry.project.invalidateBuildCache(); + + const originRenameNode = solFileEntry.searcher.findRenameNodeByPosition( + solFileEntry.uri, getParserPositionFromVSCodePosition(position), - documentAnalyzer.analyzerTree.tree + solFileEntry.analyzerTree.tree ); if (!originRenameNode) { diff --git a/server/src/services/validation/CompilationService.ts b/server/src/services/validation/CompilationService.ts new file mode 100644 index 00000000..38924b1a --- /dev/null +++ b/server/src/services/validation/CompilationService.ts @@ -0,0 +1,74 @@ +/* eslint-disable @typescript-eslint/no-explicit-any */ + +import { existsSync } from "fs"; +import { HardhatRuntimeEnvironment } from "hardhat/types"; +import path from "path"; +import { CompilationDetails } from "../../frameworks/base/CompilationDetails"; + +export class CompilationService { + public static async compile( + compilationDetails: CompilationDetails + ): Promise { + const hre = this._getHRE(); + const { input } = compilationDetails; + + // Find or download solc compiler + const { compilerPath } = await hre.run("compile:solidity:solc:get-build", { + solcVersion: compilationDetails.solcVersion, + quiet: true, + }); + + // Compile + const output = await hre.run("compile:solidity:solc:run", { + input, + solcPath: compilerPath, + }); + + // Normalize errors' sourceLocation to use utf-8 offsets instead of byte offsets + for (const error of output.errors || []) { + const source = input.sources[error.sourceLocation?.file]; + + if (source === undefined) { + continue; + } + + error.sourceLocation.start = this._normalizeOffset( + source.content, + error.sourceLocation.start + ); + error.sourceLocation.end = this._normalizeOffset( + source.content, + error.sourceLocation.end + ); + } + + return output; + } + + // Workaround to load hardhat, since it requires a hardhat.config file to exist + private static _getHRE(): HardhatRuntimeEnvironment { + let directory = __dirname; + while (directory !== "/") { + const potentialConfigFiles = ["ts", "js"].map((ext) => + path.join(directory, `hardhat.config.${ext}`) + ); + for (const potentialConfigFile of potentialConfigFiles) { + if (existsSync(potentialConfigFile)) { + process.env.HARDHAT_CONFIG = potentialConfigFile; + return require("hardhat"); + } + } + directory = path.dirname(directory); + } + throw new Error(`Couldn't load bundled hardhat library`); + } + + private static _normalizeOffset(text: string, offset: number) { + if (offset < 0) { + return offset; // don't transform negative offsets + } else { + return Buffer.from(text, "utf-8").slice(0, offset).toString("utf-8") + .length; + } + } +} diff --git a/server/src/services/validation/DiagnosticConverter.ts b/server/src/services/validation/DiagnosticConverter.ts index 885afa10..c416db49 100644 --- a/server/src/services/validation/DiagnosticConverter.ts +++ b/server/src/services/validation/DiagnosticConverter.ts @@ -3,7 +3,7 @@ import { passThroughConversion } from "@compilerDiagnostics/conversions/passThro import { compilerDiagnostics } from "@compilerDiagnostics/compilerDiagnostics"; import { CompilerDiagnostic } from "@compilerDiagnostics/types"; import { Logger } from "@utils/Logger"; -import { HardhatCompilerError } from "../../types"; +import { SolcError } from "../../types"; export class DiagnosticConverter { private logger: Logger; @@ -14,7 +14,7 @@ export class DiagnosticConverter { public convertErrors( document: TextDocument, - errors: HardhatCompilerError[] + errors: SolcError[] ): { [uri: string]: Diagnostic[] } { const diagnostics: { [uri: string]: Diagnostic[] } = {}; @@ -35,10 +35,7 @@ export class DiagnosticConverter { return diagnostics; } - public convert( - document: TextDocument, - error: HardhatCompilerError - ): Diagnostic { + public convert(document: TextDocument, error: SolcError): Diagnostic { if (error.errorCode in compilerDiagnostics) { return compilerDiagnostics[error.errorCode].fromHardhatCompilerError( document, @@ -49,9 +46,7 @@ export class DiagnosticConverter { } } - private _filterBlockedErrors( - errors: HardhatCompilerError[] - ): HardhatCompilerError[] { + private _filterBlockedErrors(errors: SolcError[]): SolcError[] { const locationGroups = this._groupByLocation(errors); return Object.values(locationGroups).flatMap( @@ -59,12 +54,9 @@ export class DiagnosticConverter { ); } - private _groupByLocation(errors: HardhatCompilerError[]) { + private _groupByLocation(errors: SolcError[]) { return errors.reduce( - ( - acc: { [key: string]: HardhatCompilerError[] }, - error: HardhatCompilerError - ) => { + (acc: { [key: string]: SolcError[] }, error: SolcError) => { const key = this._resolveErrorFileKey(error); if (!(key in acc)) { @@ -78,7 +70,7 @@ export class DiagnosticConverter { ); } - private _resolveErrorFileKey(error: HardhatCompilerError) { + private _resolveErrorFileKey(error: SolcError) { if (!error.sourceLocation) { this.logger.error( new Error( @@ -92,9 +84,7 @@ export class DiagnosticConverter { return `${error.sourceLocation.file}::${error.sourceLocation.start}::${error.sourceLocation.end}`; } - private _filterBlockedErrorsWithinGroup( - errors: HardhatCompilerError[] - ): HardhatCompilerError[] { + private _filterBlockedErrorsWithinGroup(errors: SolcError[]): SolcError[] { const blockCodes = errors .map((d) => (d.errorCode ? compilerDiagnostics[d.errorCode] : undefined)) .filter((cd): cd is CompilerDiagnostic => cd !== undefined) diff --git a/server/src/services/validation/HardhatWorker.ts b/server/src/services/validation/HardhatWorker.ts deleted file mode 100644 index f35f1ecb..00000000 --- a/server/src/services/validation/HardhatWorker.ts +++ /dev/null @@ -1,326 +0,0 @@ -import * as childProcess from "child_process"; -import * as path from "path"; -import { HardhatProject } from "@analyzer/HardhatProject"; -import { Logger } from "@utils/Logger"; -import { Connection } from "vscode-languageserver"; -import { - InitialisationCompleteMessage, - InvalidatePreprocessingCacheMessage, - ValidateCommand, - ValidationCompleteMessage, - WorkerProcess, -} from "../../types"; - -const UNINITIALIZED = "UNINITIALIZED"; -const STARTING = "STARTING"; -const RUNNING = "RUNNING"; -const INITIALIZATION_ERRORED = "INITIALIZATION_ERRORED"; - -type HardhatWorkerStatus = - | typeof UNINITIALIZED - | typeof INITIALIZATION_ERRORED - | typeof STARTING - | typeof RUNNING; - -export function createProcessFor( - project: HardhatProject -): childProcess.ChildProcess { - return childProcess.fork(path.resolve(__dirname, "worker.js"), { - cwd: project.basePath, - detached: true, - }); -} - -export class HardhatWorker implements WorkerProcess { - public project: HardhatProject; - public status: HardhatWorkerStatus; - public connection: Connection; - public jobs: { - [key: string]: { - resolve: (message: ValidationCompleteMessage) => void; - reject: (err: string) => void; - }; - }; - - private child: childProcess.ChildProcess | null; - private createProcessFor: ( - project: HardhatProject - ) => childProcess.ChildProcess; - private logger: Logger; - private jobCount: number; - - constructor( - project: HardhatProject, - givenCreateProcessFor: ( - project: HardhatProject - ) => childProcess.ChildProcess, - logger: Logger, - connection: Connection - ) { - this.child = null; - this.jobCount = 0; - this.jobs = {}; - - this.project = project; - this.createProcessFor = givenCreateProcessFor; - this.logger = logger; - this.connection = connection; - - this.status = UNINITIALIZED; - } - - /** - * Setup the background validation process along with listeners - * on the LSP side. - * - * The status immediately moves from UNINITIALIZED -> STARTING. An - * `INITIALISATION_COMPLETE` message from the process will move - * the status to RUNNING (an unexpected exit will move it to - * INITIALIZATION_ERRORED). - */ - public init() { - if (![UNINITIALIZED, INITIALIZATION_ERRORED].includes(this.status)) { - throw new Error("Cannot start a worker thread that has already started"); - } - - this.status = STARTING; - - this.child = this.createProcessFor(this.project); - - // deal with messages sent from the background process to the LSP - this.child.on( - "message", - (message: InitialisationCompleteMessage | ValidationCompleteMessage) => { - switch (message.type) { - case "INITIALISATION_COMPLETE": - this.status = RUNNING; - this.logger.trace( - `initialisation complete for ${this.project.basePath}` - ); - this.connection.sendNotification("custom/worker-initialized", { - projectBasePath: this.project.basePath, - }); - break; - case "VALIDATION_COMPLETE": - this._validationComplete(message); - break; - default: - this._unexectpedMessage(message); - break; - } - } - ); - - // errors on the background thread are logged - this.child.on("error", (err) => { - this.logger.error(err); - }); - - // if the background process exits due to an error - // we restart if it has previously been running, - // if exits during initialization, we leave it in - // the errored state - this.child.on("exit", this.handleExit.bind(this)); - } - - public async validate({ - uri, - documentText, - projectBasePath, - openDocuments, - }: { - uri: string; - documentText: string; - projectBasePath: string; - openDocuments: Array<{ - uri: string; - documentText: string; - }>; - }) { - return new Promise((resolve, reject) => { - const jobId = this.jobCount++; - - if (this.child === null) { - return reject(new Error("No child process to send validation")); - } - - if (this.status !== RUNNING) { - return this._validationBlocked( - { jobId, projectBasePath }, - resolve, - reject - ); - } - - this.jobs[jobId] = { resolve, reject }; - - const message: ValidateCommand = { - type: "VALIDATE", - jobId, - uri, - documentText, - projectBasePath, - openDocuments, - }; - - this.child.send(message, (err) => { - if (err) { - delete this.jobs[jobId]; - return reject(err); - } - }); - }); - } - - /** - * Inform the background validation process to clear its file caches - * and reread the solidity files from disk on the next job. - * - * @returns whether the cace was cleared - */ - public async invalidatePreprocessingCache(): Promise { - return new Promise((resolve, reject) => { - if (this.child === null) { - return reject( - new Error("No child process to send invalidate preprocessing cache") - ); - } - - // Only running validators can have their cache cleared - if (this.status !== RUNNING) { - return resolve(false); - } - - const message: InvalidatePreprocessingCacheMessage = { - type: "INVALIDATE_PREPROCESSING_CACHE", - }; - - this.child?.send(message, (err) => { - if (err) { - return reject(err); - } - - return resolve(true); - }); - }); - } - - /** - * Stop the current background validation process. - * - * The status will be set back to the unstarted state (UNINITIALIZED). - */ - public kill() { - this.child?.kill(); - // reset status to allow restarting in future - this.status = UNINITIALIZED; - } - - /** - * Stop the current background validation process and start a new one. - * - * The jobs being currently processeded are all cancelled. - */ - public async restart(): Promise { - this.logger.trace(`Restarting hardhat worker for ${this.project.basePath}`); - - this._cancelCurrentJobs(); - - this.kill(); - this.init(); - } - - public handleExit(code: number | null, signal: NodeJS.Signals | null) { - this.logger.trace( - `Hardhat Worker Process restart (${code}): ${this.project.basePath}` - ); - - if (code === 0 || signal !== null) { - this.status = UNINITIALIZED; - this._cancelCurrentJobs(); - return; - } - - if (this.status === STARTING) { - this.status = INITIALIZATION_ERRORED; - this._cancelCurrentJobs(); - return; - } - - if (this.status !== RUNNING) { - this.logger.error( - new Error( - "Exit from validator that is already UNINITIALIZED/INITIALIZATION_ERRORED" - ) - ); - return; - } - - return this.restart(); - } - - private _validationComplete(message: ValidationCompleteMessage) { - if (!(message.jobId in this.jobs)) { - this.logger.error("No job registered for validation complete"); - return; - } - - const { resolve } = this.jobs[message.jobId]; - - delete this.jobs[message.jobId]; - - resolve(message); - } - - private _unexectpedMessage(message: never) { - this.logger.error(new Error(`Unexpected error type: ${message}`)); - } - - private _validationBlocked( - { jobId, projectBasePath }: { jobId: number; projectBasePath: string }, - resolve: ( - value: ValidationCompleteMessage | PromiseLike - ) => void, - _reject: (reason?: string) => void - ): void { - if (this.status === STARTING) { - return resolve({ - type: "VALIDATION_COMPLETE", - status: "VALIDATOR_ERROR", - jobId, - projectBasePath, - reason: "validator-starting", - }); - } - - if (this.status === "INITIALIZATION_ERRORED") { - return resolve({ - type: "VALIDATION_COMPLETE", - status: "VALIDATOR_ERROR", - jobId, - projectBasePath, - reason: "validator-initialization-failed", - }); - } - - return resolve({ - type: "VALIDATION_COMPLETE", - status: "VALIDATOR_ERROR", - jobId, - projectBasePath, - reason: "validator-in-unexpected-state", - }); - } - - /** - * Reject any open jobs whose result is still promised. - */ - private _cancelCurrentJobs() { - for (const jobId of Object.keys(this.jobs)) { - const { reject } = this.jobs[jobId]; - reject("Worker process restarted"); - - delete this.jobs[jobId]; - } - } -} diff --git a/server/src/services/validation/OutputConverter.ts b/server/src/services/validation/OutputConverter.ts new file mode 100644 index 00000000..af81f8bf --- /dev/null +++ b/server/src/services/validation/OutputConverter.ts @@ -0,0 +1,31 @@ +/* eslint-disable @typescript-eslint/no-explicit-any */ +import { CompilationDetails } from "../../frameworks/base/CompilationDetails"; +import { ValidationResult, ValidationFail, ValidationPass } from "../../types"; + +export class OutputConverter { + public static getValidationResults( + compilationDetails: CompilationDetails, + solcOutput: any, + projectBasePath: string + ): ValidationResult { + if (solcOutput.errors?.length > 0) { + const validationFailMessage: ValidationFail = { + status: "VALIDATION_FAIL", + projectBasePath, + version: compilationDetails.solcVersion, + errors: solcOutput.errors, + }; + + return validationFailMessage; + } else { + const validationPassMessage: ValidationPass = { + status: "VALIDATION_PASS", + projectBasePath, + version: compilationDetails.solcVersion, + sources: Object.keys(compilationDetails.input.sources), + }; + + return validationPassMessage; + } + } +} diff --git a/server/src/services/validation/analyse.ts b/server/src/services/validation/analyse.ts index a0cba313..019d9b11 100644 --- a/server/src/services/validation/analyse.ts +++ b/server/src/services/validation/analyse.ts @@ -5,22 +5,18 @@ import { analyzeSolFile } from "@analyzer/analyzeSolFile"; import { decodeUriAndRemoveFilePrefix } from "../../utils/index"; import { ServerState } from "../../types"; -export function analyse( - { projects, solFileIndex, logger }: ServerState, +export async function analyse( + serverState: ServerState, { document: changeDoc }: TextDocumentChangeEvent ) { - logger.trace("analyse"); + serverState.logger.trace("analyse"); try { const internalUri = decodeUriAndRemoveFilePrefix(changeDoc.uri); - const solFileEntry = getOrInitialiseSolFileEntry( - { projects, solFileIndex }, - internalUri - ); + const solFileEntry = getOrInitialiseSolFileEntry(serverState, internalUri); - solFileEntry.track(); - analyzeSolFile({ solFileIndex }, solFileEntry, changeDoc.getText()); + await analyzeSolFile(serverState, solFileEntry, changeDoc.getText()); } catch (err) { - logger.error(err); + serverState.logger.error(err); } } diff --git a/server/src/services/validation/compilerProcessFactory.ts b/server/src/services/validation/compilerProcessFactory.ts deleted file mode 100644 index 1c95ff5b..00000000 --- a/server/src/services/validation/compilerProcessFactory.ts +++ /dev/null @@ -1,14 +0,0 @@ -/* istanbul ignore file: top level dependency injection */ -import { HardhatProject } from "@analyzer/HardhatProject"; -import { Logger } from "@utils/Logger"; -import { Connection } from "vscode-languageserver"; -import { WorkerProcess } from "../../types"; -import { createProcessFor, HardhatWorker } from "./HardhatWorker"; - -export function compilerProcessFactory( - project: HardhatProject, - logger: Logger, - connection: Connection -): WorkerProcess { - return new HardhatWorker(project, createProcessFor, logger, connection); -} diff --git a/server/src/services/validation/convertHardhatErrorToDiagnostic.ts b/server/src/services/validation/convertHardhatErrorToDiagnostic.ts deleted file mode 100644 index 5c8f3432..00000000 --- a/server/src/services/validation/convertHardhatErrorToDiagnostic.ts +++ /dev/null @@ -1,85 +0,0 @@ -import { Diagnostic, DiagnosticSeverity, Range } from "@common/types"; -import { TextDocument } from "vscode-languageserver-textdocument"; -import type { - HardhatError, - HardhatImportFileError, - HardhatImportLibraryError, -} from "../../types"; - -export const IMPORT_FILE_ERROR_CODES = [404, 405, 406, 407, 408, 409]; -export const IMPORT_LIBRARY_ERROR_CODES = [411]; - -function isHardhatImportFileError( - error: HardhatError -): error is HardhatImportFileError { - const errorCode = error?.errorDescriptor?.number; - - return IMPORT_FILE_ERROR_CODES.includes(errorCode); -} - -function isHardhatImportLibraryError( - error: HardhatError -): error is HardhatImportLibraryError { - const errorCode = error?.errorDescriptor?.number; - - return IMPORT_LIBRARY_ERROR_CODES.includes(errorCode); -} - -function getImportString(err: HardhatError) { - if (isHardhatImportFileError(err)) { - return err.messageArguments.imported; - } else if (isHardhatImportLibraryError(err)) { - return err.messageArguments.library; - } else { - return null; - } -} - -export function convertHardhatErrorToDiagnostic( - document: TextDocument, - err: HardhatError -): Diagnostic | null { - const importString = getImportString(err); - - if (importString === null) return null; - - return resolveImportError(document, err, importString); -} - -function resolveImportError( - document: TextDocument, - err: HardhatError, - importString: string -) { - const range = findRangeForImport(document, importString); - - if (!range) { - return null; - } - - return { - severity: DiagnosticSeverity.Error, - code: err.errorDescriptor.number, - source: "hardhat", - message: err.errorDescriptor.title, - range, - }; -} - -function findRangeForImport( - document: TextDocument, - importString: string -): Range | null { - const startIndex = document.getText().indexOf(importString); - - if (startIndex === -1) { - return null; - } - - const endIndex = startIndex + importString.length; - - return { - start: document.positionAt(startIndex), - end: document.positionAt(endIndex), - }; -} diff --git a/server/src/services/validation/invalidateWorkerPreprocessCache.ts b/server/src/services/validation/invalidateWorkerPreprocessCache.ts deleted file mode 100644 index a79755e9..00000000 --- a/server/src/services/validation/invalidateWorkerPreprocessCache.ts +++ /dev/null @@ -1,45 +0,0 @@ -import { ClientTrackingState } from "@common/types"; -import { ServerState, WorkerProcess } from "../../types"; - -export async function invalidateWorkerPreprocessCache( - serverState: ServerState, - uri: string, - allowTracked = false -) { - return serverState.telemetry.trackTiming( - "worker preprocessing cache invalidate", - async () => { - serverState.logger.trace( - `Invalidating worker preprocessing cache: ${uri}` - ); - - const entry = serverState.solFileIndex[uri]; - - if (entry === undefined) { - return { status: "failed_precondition", result: false }; - } - - if (!allowTracked && entry.tracking === ClientTrackingState.TRACKED) { - return { status: "ok", result: false }; - } - - const project = entry.project; - - if (project.type !== "hardhat") { - return { status: "ok", result: false }; - } - - const workerProcess: WorkerProcess | undefined = - serverState.workerProcesses[project.basePath]; - - if (workerProcess === undefined) { - return { status: "failed_precondition", result: false }; - } - - const result: boolean = - await workerProcess.invalidatePreprocessingCache(); - - return { status: "ok", result }; - } - ); -} diff --git a/server/src/services/validation/restartWorker.ts b/server/src/services/validation/restartWorker.ts deleted file mode 100644 index c0074956..00000000 --- a/server/src/services/validation/restartWorker.ts +++ /dev/null @@ -1,36 +0,0 @@ -import { ServerState, WorkerProcess } from "../../types"; - -export async function restartWorker(serverState: ServerState, uri: string) { - return serverState.telemetry.trackTiming("worker restart", async () => { - serverState.logger.trace(`Restarting worker: ${uri}`); - - const project = Object.values(serverState.projects).find( - (p) => p.configPath === uri - ); - - if (project === undefined) { - serverState.logger.error( - `No project found for changed config file: ${uri}` - ); - - return { status: "failed_precondition", result: false }; - } - - const workerProcess: WorkerProcess | undefined = - serverState.workerProcesses[project.basePath]; - - if (workerProcess === undefined) { - serverState.logger.error( - new Error( - `No worker process for changed config file: ${project.basePath}` - ) - ); - - return { status: "failed_precondition", result: false }; - } - - await workerProcess.restart(); - - return { status: "ok", result: true }; - }); -} diff --git a/server/src/services/validation/validate.ts b/server/src/services/validation/validate.ts index 459f9b53..1413cadc 100644 --- a/server/src/services/validation/validate.ts +++ b/server/src/services/validation/validate.ts @@ -1,92 +1,141 @@ -import { Diagnostic, TextDocumentChangeEvent } from "vscode-languageserver"; -import { TextDocument } from "vscode-languageserver-textdocument"; -import { isHardhatProject } from "@analyzer/HardhatProject"; -import { deserializeError } from "serialize-error"; +/* eslint-disable @typescript-eslint/no-non-null-assertion */ +/* eslint-disable @typescript-eslint/no-explicit-any */ +import { + Diagnostic, + DiagnosticSeverity, + TextDocumentChangeEvent, +} from "vscode-languageserver"; +import { TextDocument, Range } from "vscode-languageserver-textdocument"; +import _ from "lodash"; import path from "path"; -import { decodeUriAndRemoveFilePrefix } from "../../utils/index"; +import { decodeUriAndRemoveFilePrefix, toUnixStyle } from "../../utils/index"; import { - CancelledValidation, - HardhatError, - HardhatSourceImportError, - HardhatThrownError, JobCompletionError, ServerState, - UnknownError, - ValidationCompleteMessage, + ValidationResult, ValidationFail, ValidationJobFailureNotification, ValidationJobStatusNotification, ValidationPass, - ValidatorError, - WorkerProcess, + OpenDocuments, + BuildInputFailed, } from "../../types"; import { getOpenDocumentsInProject } from "../../queries/getOpenDocumentsInProject"; import { runningOnWindows } from "../../utils/operatingSystem"; +import { CompilationDetails } from "../../frameworks/base/CompilationDetails"; import { DiagnosticConverter } from "./DiagnosticConverter"; -import { convertHardhatErrorToDiagnostic } from "./convertHardhatErrorToDiagnostic"; +import { CompilationService } from "./CompilationService"; +import { OutputConverter } from "./OutputConverter"; export async function validate( serverState: ServerState, change: TextDocumentChangeEvent ): Promise { return serverState.telemetry.trackTiming("validation", async () => { - const internalUri = decodeUriAndRemoveFilePrefix(change.document.uri); + // Ensure file indexing finished + if (!serverState.indexingFinished) { + serverState.logger.trace(`Can't validate before indexing is finished`); + return { status: "failed_precondition", result: false }; + } - const solFileEntry = serverState.solFileIndex[internalUri]; + // Ensure file is analyzed + const sourceUri = decodeUriAndRemoveFilePrefix(change.document.uri); + const solFileEntry = serverState.solFileIndex[sourceUri]; if (solFileEntry === undefined) { serverState.logger.error( new Error( - `Could not send to valiation process, uri is not indexed: ${internalUri}` + `Could not send to validation process, uri is not indexed: ${sourceUri}` ) ); return { status: "failed_precondition", result: false }; } - if (!isHardhatProject(solFileEntry.project)) { - serverState.logger.trace( - `No project associated with file, change not propagated to validation process: ${change.document.uri}` - ); + // Get the file project's open documents + const openDocuments = getOpenDocumentsInProject( + serverState, + solFileEntry.project + ).map((openDoc) => ({ + uri: decodeUriAndRemoveFilePrefix(openDoc.uri), + documentText: openDoc.getText(), + })); + // Ensure sourceUri is included in open documents + if (!openDocuments.some((doc) => doc.uri === sourceUri)) { return { status: "failed_precondition", result: false }; } - const workerProcess: WorkerProcess | undefined = - serverState.workerProcesses[solFileEntry.project.basePath]; - - if (workerProcess === undefined) { - serverState.logger.error( - new Error( - `No worker process for project: ${solFileEntry.project.basePath}` - ) + // Associate validation request id to this file + const validationId = ++serverState.validationCount; + serverState.lastValidationId[sourceUri] = validationId; + + const { project } = solFileEntry; + let validationResult: ValidationResult; + + const logger = _.clone(serverState.logger); + logger.tag = `${path.basename(project.basePath)}:${validationId}`; + + try { + let compilationDetails: CompilationDetails; + let compilerOutput: any; + + // Get solc input from framework provider + await logger.trackTime( + `Building compilation (${project.frameworkName()} - ${path.basename( + sourceUri + )})`, + async () => { + compilationDetails = await project.buildCompilation( + sourceUri, + openDocuments + ); + } ); - return { status: "failed_precondition", result: false }; - } - - const openDocuments = getOpenDocumentsInProject( - serverState, - solFileEntry.project - ); + // Use bundled hardhat to compile + await logger.trackTime("Compiling", async () => { + compilerOutput = await CompilationService.compile(compilationDetails!); + }); - const documentText = change.document.getText(); - - const completeMessage = await workerProcess.validate({ - uri: internalUri, - documentText, - projectBasePath: solFileEntry.project.basePath, - openDocuments: openDocuments.map((openDoc) => ({ - uri: decodeUriAndRemoveFilePrefix(openDoc.uri), - documentText: openDoc.getText(), - })), - }); + validationResult = OutputConverter.getValidationResults( + compilationDetails!, + compilerOutput, + project.basePath + ); + } catch (error: any) { + logger.trace(error); + + if (error._isBuildInputError) { + // Framework provider detailed error on why buildInput failed + validationResult = { + status: "BUILD_INPUT_ERROR", + error, + }; + } else { + // Generic catch-all error + validationResult = { + status: "JOB_COMPLETION_ERROR", + projectBasePath: project.basePath, + reason: error?.message ?? error, + }; + } + } - sendResults(serverState, change, completeMessage); + // Only show validation result if this is the latest validation request for this file + if (serverState.lastValidationId[sourceUri] === validationId) { + sendResults( + serverState, + change, + validationResult, + openDocuments, + project.basePath + ); + } return { status: "ok", - result: completeMessage.status === "VALIDATION_PASS", + result: validationResult.status === "VALIDATION_PASS", }; }); } @@ -94,135 +143,114 @@ export async function validate( function sendResults( serverState: ServerState, change: TextDocumentChangeEvent, - completeMessage: ValidationCompleteMessage + completeMessage: ValidationResult, + openDocuments: OpenDocuments, + projectBasePath: string ) { switch (completeMessage.status) { - case "HARDHAT_ERROR": - return hardhatThrownFail(serverState, change, completeMessage); case "JOB_COMPLETION_ERROR": - return jobCompletionErrorFail(serverState, change, completeMessage); - case "VALIDATOR_ERROR": - return validatorErrorFail(serverState, change, completeMessage); - case "UNKNOWN_ERROR": - return unknownErrorFail(serverState, change, completeMessage); + jobCompletionErrorFail(serverState, change, completeMessage); + break; case "VALIDATION_FAIL": - return validationFail(serverState, change, completeMessage); + validationFail(serverState, change, completeMessage); + break; case "VALIDATION_PASS": - return validationPass(serverState, change, completeMessage); - case "CANCELLED": - return cancelled(serverState, change, completeMessage); + validationPass(serverState, change, completeMessage, openDocuments); + break; + case "BUILD_INPUT_ERROR": + handleBuildInputError( + serverState, + change, + completeMessage, + projectBasePath + ); + break; default: - return assertUnknownMessageStatus(completeMessage); + assertUnknownMessageStatus(completeMessage); + break; } } -function hardhatThrownFail( +function handleBuildInputError( serverState: ServerState, { document }: TextDocumentChangeEvent, - { projectBasePath, hardhatError }: HardhatThrownError + { error }: BuildInputFailed, + projectBasePath: string ) { - const diagnostic = convertHardhatErrorToDiagnostic(document, hardhatError); + // Clear existing diagnostics + clearDiagnostics(serverState, document.uri); + + // Handle file-specific errors + for (const [sourceUri, fileErrors] of Object.entries( + error.fileSpecificErrors + )) { + // Send diagnostics if the position is specified + const diagnostics = fileErrors + .filter((e) => e.startOffset !== undefined && e.endOffset !== undefined) + .map(({ error: fileError, startOffset, endOffset }) => ({ + severity: DiagnosticSeverity.Error, + source: fileError.source, + code: fileError.code, + message: fileError.message, + range: offsetsToRange(document, startOffset!, endOffset!), + })); - if (diagnostic === null) { - // note the error - serverState.logger.error(hardhatError); - - // clear any diagnostics on the page serverState.connection.sendDiagnostics({ - uri: document.uri, - diagnostics: [], + uri: sourceUri, + diagnostics, }); - const displayText = hardhatError.errorDescriptor.title; - - const errorFile = isHardhatSourceImportError(hardhatError) - ? resolveErrorFilePath(projectBasePath, hardhatError) - : undefined; - - const validationJobStatus: ValidationJobStatusNotification = { - validationRun: false, + // Send status item error + sendStatusItemError( + serverState, projectBasePath, - reason: "non-import line hardhat error", - displayText, - errorFile, - }; - - serverState.connection.sendNotification( - "custom/validation-job-status", - validationJobStatus + fileErrors.map((e) => e.error.message).join(", "), + sourceUri ); - } else { - serverState.connection.sendDiagnostics({ - uri: document.uri, - diagnostics: [diagnostic], - }); - - const validationJobStatus: ValidationJobStatusNotification = { - validationRun: false, - projectBasePath, - reason: "import line hardhat error", - displayText: "import error", - }; + } - serverState.connection.sendNotification( - "custom/validation-job-status", - validationJobStatus - ); + // Send status item for project-wide errors + for (const projectWideError of error.projectWideErrors) { + sendStatusItemError(serverState, projectBasePath, projectWideError.message); } } -function jobCompletionErrorFail( - serverState: ServerState, - { document }: TextDocumentChangeEvent, - jobCompletionError: JobCompletionError -) { +function clearDiagnostics(serverState: ServerState, uri: string) { serverState.connection.sendDiagnostics({ - uri: document.uri, + uri, diagnostics: [], }); - - const data: ValidationJobStatusNotification = - jobStatusFrom(jobCompletionError); - - serverState.connection.sendNotification("custom/validation-job-status", data); } -function validatorErrorFail( +function sendStatusItemError( serverState: ServerState, - { document }: TextDocumentChangeEvent, - validatorError: ValidatorError + projectBasePath: string, + message: string, + errorFile?: string ) { - serverState.connection.sendDiagnostics({ - uri: document.uri, - diagnostics: [], - }); - const data: ValidationJobStatusNotification = jobStatusFrom(validatorError); + const validationJobStatus: ValidationJobStatusNotification = { + validationRun: false, + projectBasePath, + reason: message, + displayText: message, + errorFile, + }; - serverState.connection.sendNotification("custom/validation-job-status", data); + serverState.connection.sendNotification( + "custom/validation-job-status", + validationJobStatus + ); } -function unknownErrorFail( +function jobCompletionErrorFail( serverState: ServerState, { document }: TextDocumentChangeEvent, - { error, projectBasePath }: UnknownError + jobCompletionError: JobCompletionError ) { - // clear any current diagnostics - serverState.connection.sendDiagnostics({ - uri: document.uri, - diagnostics: [], - }); - - // eslint-disable-next-line @typescript-eslint/no-explicit-any - const displayText = (error as any)?.message ?? "internal error"; + clearDiagnostics(serverState, document.uri); - serverState.logger.error(deserializeError(error)); - - const data: ValidationJobStatusNotification = { - validationRun: false, - projectBasePath, - reason: "unknown", - displayText, - }; + const data: ValidationJobStatusNotification = + jobStatusFrom(jobCompletionError); serverState.connection.sendNotification("custom/validation-job-status", data); } @@ -277,40 +305,12 @@ function jobStatusFrom({ reason, displayText: "no compatibile solc version found", }; - case "validator-starting": - return { - validationRun: false, - projectBasePath, - reason, - displayText: "validator starting", - }; - case "validator-initialization-failed": - return { - validationRun: false, - projectBasePath, - reason, - displayText: "unable to load hardhat config", - }; - case "validator-in-unexpected-state": - return { - validationRun: false, - projectBasePath, - reason, - displayText: "validator in unexpected state", - }; - case "contract-not-in-project": - return { - validationRun: false, - projectBasePath, - reason, - displayText: "not part of hardhat project", - }; default: return { validationRun: false, projectBasePath, reason, - displayText: "unknown failure reason", + displayText: reason, }; } } @@ -318,15 +318,21 @@ function jobStatusFrom({ function validationPass( serverState: ServerState, _change: TextDocumentChangeEvent, - message: ValidationPass + message: ValidationPass, + openDocuments: OpenDocuments ): void { for (const source of message.sources) { - const uri = runningOnWindows() ? `/${source}` : source; + // TODO: improve this. Currently necessary because on hardhat source names are not full paths + let uri = openDocuments + .map((doc) => doc.uri) + .find((u) => toUnixStyle(u).endsWith(source)); + if (uri === undefined) { + continue; + } - serverState.connection.sendDiagnostics({ - uri, - diagnostics: [], - }); + uri = runningOnWindows() ? `/${uri}` : uri; + + clearDiagnostics(serverState, uri); } sendValidationProcessSuccess( @@ -348,7 +354,9 @@ function validationFail( diagnosticConverter.convertErrors(change.document, message.errors); const diagnosticsInOpenEditor = Object.entries(diagnostics) - .filter(([diagnosticUri]) => document.uri.includes(diagnosticUri)) + .filter(([diagnosticUri]) => + decodeURIComponent(document.uri).includes(diagnosticUri) + ) .flatMap(([, diagnostic]) => diagnostic); serverState.connection.sendDiagnostics({ @@ -363,38 +371,20 @@ function validationFail( ); } -function cancelled( - serverState: ServerState, - _change: TextDocumentChangeEvent, - message: CancelledValidation -): void { - serverState.logger.trace(`Cancelled validation job ${message.jobId}`); -} - -function assertUnknownMessageStatus(completeMessage: never) { +function assertUnknownMessageStatus(completeMessage: ValidationResult) { throw new Error( // eslint-disable-next-line @typescript-eslint/no-explicit-any `Unrecognized message status: ${(completeMessage as any)?.status}` ); } -function isHardhatSourceImportError( - error: HardhatError -): error is HardhatSourceImportError { - return ( - error.errorDescriptor.number >= 400 && error.errorDescriptor.number <= 499 - ); -} - -function resolveErrorFilePath( - projectBasePath: string, - hardhatError: HardhatSourceImportError -): string { - const errorPath = decodeUriAndRemoveFilePrefix( - path.join(projectBasePath, hardhatError.messageArguments.from) - ); - - const osPath = runningOnWindows() ? `/${errorPath}` : errorPath; - - return osPath; +function offsetsToRange( + document: TextDocument, + startOffset: number, + endOffset: number +): Range { + return { + start: document.positionAt(startOffset), + end: document.positionAt(endOffset), + }; } diff --git a/server/src/services/validation/worker.ts b/server/src/services/validation/worker.ts deleted file mode 100644 index ddb71e05..00000000 --- a/server/src/services/validation/worker.ts +++ /dev/null @@ -1,42 +0,0 @@ -/* istanbul ignore file: setup point for validation process */ -import type { - InitialisationCompleteMessage, - ValidationCompleteMessage, -} from "../../types"; -import { dispatch } from "./worker/dispatch"; -import { initialiseWorkerState } from "./worker/initialiseWorkerState"; -import { setupWorkerLogger } from "./worker/setupWorkerLogger"; - -const initialiseWorker = async () => { - const workerLogger = setupWorkerLogger(); - - workerLogger.trace("[WORKER] Starting Hardhat Worker"); - const workserState = await initialiseWorkerState(send, workerLogger); - - workerLogger.trace("[WORKER] Waiting for messages ..."); - - process.on("message", dispatch(workserState)); - - await workserState.send({ type: "INITIALISATION_COMPLETE" }); -}; - -function send( - message: InitialisationCompleteMessage | ValidationCompleteMessage -): Promise { - return new Promise((resolve, reject) => { - if (!process.send) { - return; - } - - process.send(message, (err: unknown) => { - if (err) { - return reject(err); - } - - resolve(); - }); - }); -} - -// eslint-disable-next-line @typescript-eslint/no-floating-promises -initialiseWorker(); diff --git a/server/src/services/validation/worker/build/buildInputsToSolc.ts b/server/src/services/validation/worker/build/buildInputsToSolc.ts deleted file mode 100644 index be7f35d2..00000000 --- a/server/src/services/validation/worker/build/buildInputsToSolc.ts +++ /dev/null @@ -1,377 +0,0 @@ -import { analyze } from "@nomicfoundation/solidity-analyzer"; -import { isDeepStrictEqual } from "util"; -import type { - CompilerInput, - HardhatRuntimeEnvironment, - SolcBuild, -} from "hardhat/types"; -import { - WorkerState, - BuildJob, - ValidationCompleteMessage, - JobCompletionError, -} from "../../../../types"; -import { runningOnWindows } from "../../../../utils/operatingSystem"; - -export interface SolcInput { - built: true; - jobId: number; - solcVersion: string; - input: CompilerInput; - solcBuild: SolcBuild; - sourcePaths: string[]; -} - -export async function buildInputsToSolc( - workerState: WorkerState, - buildJob: BuildJob -): Promise<{ built: false; result: ValidationCompleteMessage } | SolcInput> { - const analysis = analyze(buildJob.documentText); - - if ( - workerState.previousChangedDocAnalysis !== undefined && - buildJob.uri === workerState.previousChangedDocAnalysis.uri && - isDeepStrictEqual(analysis, workerState.previousChangedDocAnalysis.analysis) - ) { - if (workerState.previousSolcInput !== undefined) { - const { overwrite } = overwriteWithCurrentChanges( - workerState.previousSolcInput, - buildJob.uri, - buildJob.documentText - ); - - if (!overwrite) { - // log and continue - workerState.logger.error( - `Unable to overwrite changed doc at: ${buildJob.uri}` - ); - } else { - buildJob.preprocessingFinished = new Date(); - buildJob.fromInputCache = true; - return workerState.previousSolcInput; - } - } - } else { - workerState.previousChangedDocAnalysis = { uri: buildJob.uri, analysis }; - } - - workerState.hre.tasks[ - workerState.tasks.TASK_COMPILE_SOLIDITY_READ_FILE - ].setAction( - async ( - args: { absolutePath: string }, - hre: HardhatRuntimeEnvironment, - runSuper: () => {} - ) => { - const normalizedUri = args.absolutePath.replaceAll("\\", "/"); - - const openDoc = buildJob.openDocuments.find( - (doc) => doc.uri === normalizedUri - ); - - if (openDoc !== undefined) { - return openDoc.documentText; - } - - return workerState.originalReadFileAction(args, hre, runSuper); - } - ); - - await getSourcePaths(workerState, buildJob); - - if (isJobCancelled(buildJob)) { - return cancel(buildJob); - } - - await getSourceNames(workerState, buildJob); - - if (isJobCancelled(buildJob)) { - return cancel(buildJob); - } - - await readFileCache(workerState, buildJob); - - if (isJobCancelled(buildJob)) { - return cancel(buildJob); - } - - await getDependencyGraph(workerState, buildJob); - - if (isJobCancelled(buildJob)) { - return cancel(buildJob); - } - - getValidationFile(workerState, buildJob); - - if (buildJob.context.file === undefined) { - return jobCompletionError(buildJob, "contract-not-in-project"); - } - - const result = await getCompilationJob(workerState, buildJob); - - if (result !== null) { - return { built: false, result }; - } - - if (isJobCancelled(buildJob)) { - return cancel(buildJob); - } - - await getSolcInput(workerState, buildJob); - - if (isJobCancelled(buildJob)) { - return cancel(buildJob); - } - - const solcVersion = buildJob.context.compilationJob.getSolcConfig().version; - - await getSolcBuild(workerState, buildJob, solcVersion); - - if (isJobCancelled(buildJob)) { - return cancel(buildJob); - } - - buildJob.preprocessingFinished = new Date(); - - const solcInput: SolcInput = { - built: true, - solcVersion, - jobId: buildJob.jobId, - input: buildJob.context.input, - solcBuild: buildJob.context.solcBuild, - sourcePaths: buildJob.context.sourcePaths ?? [], - }; - - workerState.previousSolcInput = solcInput; - - return solcInput; -} - -function overwriteWithCurrentChanges( - previous: SolcInput, - changedUri: string, - changedDocumentText: string -) { - const normalizedChangedUri = changedUri.replaceAll("\\", "/"); - const changedDocKey = Object.keys(previous.input.sources).find((k) => - normalizedChangedUri.endsWith(k) - ); - - if (changedDocKey === undefined) { - return { overwrite: false }; - } - - previous.input.sources[changedDocKey] = { content: changedDocumentText }; - - return { overwrite: true }; -} - -// Gets the paths to the contract files for the project -async function getSourcePaths( - { hre, tasks: { TASK_COMPILE_SOLIDITY_GET_SOURCE_PATHS } }: WorkerState, - { context }: BuildJob -) { - context.sourcePaths = await hre.run(TASK_COMPILE_SOLIDITY_GET_SOURCE_PATHS); -} - -// transform them into relative paths to the project base path -async function getSourceNames( - { hre, tasks: { TASK_COMPILE_SOLIDITY_GET_SOURCE_NAMES } }: WorkerState, - { context }: BuildJob -) { - context.sourceNames = await hre.run( - TASK_COMPILE_SOLIDITY_GET_SOURCE_NAMES, - context - ); -} - -async function readFileCache( - { SolidityFilesCache, solidityFilesCachePath }: WorkerState, - { context }: BuildJob -) { - context.solidityFilesCache = await SolidityFilesCache.readFromFile( - solidityFilesCachePath - ); -} - -async function getDependencyGraph( - { hre, tasks: { TASK_COMPILE_SOLIDITY_GET_DEPENDENCY_GRAPH } }: WorkerState, - { context }: BuildJob -) { - context.dependencyGraph = await hre.run( - TASK_COMPILE_SOLIDITY_GET_DEPENDENCY_GRAPH, - context - ); -} - -function getValidationFile( - _workerState: WorkerState, - { uri, context }: BuildJob -): void { - context.file = context.dependencyGraph - .getResolvedFiles() - .filter((f: { absolutePath: string }) => - uriEquals(f.absolutePath.replaceAll("\\", "/"), uri) - )[0]; -} - -async function getCompilationJob( - { - hre, - tasks: { TASK_COMPILE_SOLIDITY_GET_COMPILATION_JOB_FOR_FILE }, - logger, - }: WorkerState, - { jobId, projectBasePath, context, startTime }: BuildJob -): Promise { - context.compilationJob = await hre.run( - TASK_COMPILE_SOLIDITY_GET_COMPILATION_JOB_FOR_FILE, - { - file: context.file, - dependencyGraph: context.dependencyGraph, - solidityFilesCache: context.solidityFilesCache, - } - ); - - if (context.compilationJob.reason) { - logger.trace( - `[WORKER] Compilation job failed (${ - (new Date().getTime() - startTime.getTime()) / 1000 - }) - ${context.compilationJob.reason}` - ); - - return { - type: "VALIDATION_COMPLETE", - status: "JOB_COMPLETION_ERROR", - jobId, - projectBasePath, - reason: context.compilationJob.reason, - }; - } - - return null; -} - -async function getSolcInput( - { hre, tasks: { TASK_COMPILE_SOLIDITY_GET_COMPILER_INPUT } }: WorkerState, - { uri, documentText, openDocuments, context }: BuildJob -): Promise { - const modifiedFiles = { - [uri]: documentText, - }; - - for (const unsavedDocument of openDocuments) { - modifiedFiles[unsavedDocument.uri] = unsavedDocument.documentText; - } - - context.compilationJob - .getResolvedFiles() - .forEach( - (file: { absolutePath: string; content: { rawContent: string } }) => { - const normalizeAbsPath = file.absolutePath.replaceAll("\\", "/"); - - if (modifiedFiles[normalizeAbsPath]) { - file.content.rawContent = modifiedFiles[normalizeAbsPath]; - } - } - ); - - context.input = await hre.run( - TASK_COMPILE_SOLIDITY_GET_COMPILER_INPUT, - context - ); - - return null; -} - -/** - * The solc build subtask, this downloads the appropriate compiler - * for the given solc version, then checks the hash of the solc binary. - * As these checks are expensive, we cache in the workerState whether - * the download and check has already been done - * @param workerState the state shared between build jobs - * @param buildJob the container for the context of the build job - * @param solcVersion the solc compiler to download - * @returns a promise that the context has been populated - * with the compiler path details - */ -async function getSolcBuild( - { - hre, - tasks: { TASK_COMPILE_SOLIDITY_GET_SOLC_BUILD }, - compilerMetadataCache, - }: WorkerState, - { context }: BuildJob, - solcVersion: string -) { - try { - const cachedBuildVersionPromise = compilerMetadataCache[solcVersion]; - - if (cachedBuildVersionPromise !== undefined) { - const cachedSolcBuild = await cachedBuildVersionPromise; - - context.solcBuild = cachedSolcBuild; - - return; - } - - const solcBuildPromise = hre.run(TASK_COMPILE_SOLIDITY_GET_SOLC_BUILD, { - quiet: true, - solcVersion, - }); - - compilerMetadataCache[solcVersion] = solcBuildPromise; - - const solcBuild: SolcBuild = await solcBuildPromise; - - context.solcBuild = solcBuild; - } catch (err) { - // remove the cached promise on build task failure - delete compilerMetadataCache[solcVersion]; - - throw err; - } -} - -function cancel({ jobId, projectBasePath }: BuildJob): { - built: false; - result: ValidationCompleteMessage; -} { - return { - built: false, - result: { - type: "VALIDATION_COMPLETE", - status: "CANCELLED", - jobId, - projectBasePath, - }, - }; -} - -function jobCompletionError( - { jobId, projectBasePath }: BuildJob, - reason: string -): { - built: false; - result: JobCompletionError; -} { - return { - built: false, - result: { - type: "VALIDATION_COMPLETE", - status: "JOB_COMPLETION_ERROR", - jobId, - projectBasePath, - reason, - }, - }; -} - -function isJobCancelled(buildJob: BuildJob) { - return buildJob.status === "cancelled"; -} - -function uriEquals(uri1: string, uri2: string) { - return runningOnWindows() - ? uri1.toLowerCase() === uri2.toLowerCase() - : uri1 === uri2; -} diff --git a/server/src/services/validation/worker/build/convertErrorToMessage.ts b/server/src/services/validation/worker/build/convertErrorToMessage.ts deleted file mode 100644 index a4561572..00000000 --- a/server/src/services/validation/worker/build/convertErrorToMessage.ts +++ /dev/null @@ -1,36 +0,0 @@ -import { serializeError } from "serialize-error"; -import { HardhatError, ValidationCompleteMessage } from "../../../../types"; - -export function convertErrorToMessage( - err: unknown, - { jobId, projectBasePath }: { jobId: number; projectBasePath: string } -): ValidationCompleteMessage { - if (isHardhatPlatformError(err)) { - return { - type: "VALIDATION_COMPLETE", - status: "HARDHAT_ERROR", - jobId, - projectBasePath, - hardhatError: { - name: "HardhatError", - errorDescriptor: err.errorDescriptor, - messageArguments: err.messageArguments, - }, - }; - } - - return { - type: "VALIDATION_COMPLETE", - status: "UNKNOWN_ERROR", - jobId, - projectBasePath, - error: serializeError(err), - }; -} - -function isHardhatPlatformError(err: unknown): err is HardhatError { - return ( - // eslint-disable-next-line @typescript-eslint/no-explicit-any - err !== undefined && err !== null && (err as any)._isHardhatError === true - ); -} diff --git a/server/src/services/validation/worker/build/hardhatBuild.ts b/server/src/services/validation/worker/build/hardhatBuild.ts deleted file mode 100644 index 8fcbb550..00000000 --- a/server/src/services/validation/worker/build/hardhatBuild.ts +++ /dev/null @@ -1,62 +0,0 @@ -import { - BuildJob, - ValidationCompleteMessage, - WorkerState, -} from "../../../../types"; -import { clearPreprocessingCacheState } from "../utils/clearPreprocessingCacheState"; -import { buildInputsToSolc } from "./buildInputsToSolc"; -import { convertErrorToMessage } from "./convertErrorToMessage"; -import { solcCompile, SolcResult } from "./solcCompile"; -import { solcOutputToCompleteMessage } from "./solcOutputToCompleteMessage"; - -export async function hardhatBuild( - workerState: WorkerState, - buildJob: BuildJob -): Promise { - try { - const solcInputs = await buildInputsToSolc(workerState, buildJob); - - if (!solcInputs.built) { - return solcInputs.result; - } - - const solcResult = await solcCompile(workerState, solcInputs); - - clearCacheOnImportLineError(workerState, solcResult); - - return solcOutputToCompleteMessage( - workerState, - buildJob, - solcInputs, - solcResult - ); - } catch (err) { - return convertErrorToMessage(err, buildJob); - } -} - -function clearCacheOnImportLineError( - workerState: WorkerState, - solcResult: SolcResult -) { - if ( - solcResult.output.errors !== undefined && - solcResult.output.errors.some((error) => - ["6275", "7858"].includes(error.errorCode) - ) - ) { - clearPreprocessingCacheState(workerState); - } -} - -export function switchForPaths( - sources: { [key: string]: unknown }, - paths: string[] = [] -): string[] { - return Object.keys(sources) - .map((source) => - paths.find((p) => p.replaceAll("\\", "/").endsWith(source)) - ) - .filter((p): p is string => p !== undefined) - .map((path) => path.replaceAll("\\", "/")); -} diff --git a/server/src/services/validation/worker/build/solcCompile.ts b/server/src/services/validation/worker/build/solcCompile.ts deleted file mode 100644 index 7bf42921..00000000 --- a/server/src/services/validation/worker/build/solcCompile.ts +++ /dev/null @@ -1,73 +0,0 @@ -import type { SolcBuild } from "hardhat/types"; -import { HardhatCompilerError, WorkerState } from "../../../../types"; -import { SolcInput } from "./buildInputsToSolc"; - -export interface SolcResult { - output: { - errors: HardhatCompilerError[]; - sources: { - [key: string]: unknown; - }; - }; - solcBuild: SolcBuild; -} - -export async function solcCompile( - { - hre, - tasks: { TASK_COMPILE_SOLIDITY_RUN_SOLCJS, TASK_COMPILE_SOLIDITY_RUN_SOLC }, - }: WorkerState, - { input, solcBuild }: SolcInput -): Promise { - let output; - - const originalInput = input as { settings: {} }; - - const overriddenInput = { - ...originalInput, - settings: { - ...originalInput.settings, - outputSelection: {}, - }, - }; - - if (solcBuild.isSolcJs) { - output = await hre.run(TASK_COMPILE_SOLIDITY_RUN_SOLCJS, { - input: overriddenInput, - solcJsPath: solcBuild.compilerPath, - }); - } else { - output = await hre.run(TASK_COMPILE_SOLIDITY_RUN_SOLC, { - input: overriddenInput, - solcPath: solcBuild.compilerPath, - }); - } - - // Normalize errors' sourceLocation to use utf-8 offsets instead of byte offsets - for (const error of output.errors || []) { - const source = input.sources[error.sourceLocation?.file]; - - if (source === undefined) { - continue; - } - - error.sourceLocation.start = normalizeOffset( - source.content, - error.sourceLocation.start - ); - error.sourceLocation.end = normalizeOffset( - source.content, - error.sourceLocation.end - ); - } - - return { output, solcBuild }; -} - -const normalizeOffset = (text: string, offset: number) => { - if (offset < 0) { - return offset; // don't transform negative offsets - } else { - return Buffer.from(text, "utf-8").slice(0, offset).toString("utf-8").length; - } -}; diff --git a/server/src/services/validation/worker/build/solcOutputToCompleteMessage.ts b/server/src/services/validation/worker/build/solcOutputToCompleteMessage.ts deleted file mode 100644 index 5b41cf7c..00000000 --- a/server/src/services/validation/worker/build/solcOutputToCompleteMessage.ts +++ /dev/null @@ -1,84 +0,0 @@ -import { - BuildJob, - CancelledValidation, - ValidationCompleteMessage, - ValidationFail, - ValidationPass, - WorkerState, -} from "../../../../types"; -import { SolcInput } from "./buildInputsToSolc"; -import { SolcResult } from "./solcCompile"; -import { switchForPaths } from "./hardhatBuild"; - -export function solcOutputToCompleteMessage( - workerState: WorkerState, - buildJob: BuildJob, - solcInputs: SolcInput, - { output }: SolcResult -): ValidationCompleteMessage { - if (buildJob.status === "cancelled") { - const cancelledMessage: CancelledValidation = { - type: "VALIDATION_COMPLETE", - status: "CANCELLED", - jobId: buildJob.jobId, - projectBasePath: buildJob.projectBasePath, - }; - - return cancelledMessage; - } - - if (output.errors?.length > 0) { - logCompletionMessage(workerState, buildJob, "Fail"); - - const validationFailMessage: ValidationFail = { - type: "VALIDATION_COMPLETE", - status: "VALIDATION_FAIL", - jobId: buildJob.jobId, - projectBasePath: buildJob.projectBasePath, - version: solcInputs.solcVersion, - errors: output.errors, - }; - - return validationFailMessage; - } else { - logCompletionMessage(workerState, buildJob, "Pass"); - - const validationPassMessage: ValidationPass = { - type: "VALIDATION_COMPLETE", - status: "VALIDATION_PASS", - jobId: buildJob.jobId, - projectBasePath: buildJob.projectBasePath, - version: solcInputs.solcVersion, - sources: switchForPaths(output.sources, solcInputs.sourcePaths), - }; - - return validationPassMessage; - } -} - -function logCompletionMessage( - workerState: WorkerState, - buildJob: BuildJob, - passOrFail: "Pass" | "Fail" -) { - const finalSection = - buildJob.preprocessingFinished === undefined - ? "" - : `, prep: ${timeSinceInSecs( - buildJob.startTime, - buildJob.preprocessingFinished - )}, solc: ${timeSinceInSecs(buildJob.preprocessingFinished)}`; - - workerState.logger.trace( - `[WORKER:${buildJob.jobId}] Validation complete - ${passOrFail} ${ - buildJob.fromInputCache ? "[Cached]" : "" - } (total: ${timeSinceInSecs(buildJob.added)}, queued: ${timeSinceInSecs( - buildJob.added, - buildJob.startTime - )}${finalSection})` - ); -} - -function timeSinceInSecs(startTime: Date, endTime: Date = new Date()) { - return (endTime.getTime() - startTime.getTime()) / 1000; -} diff --git a/server/src/services/validation/worker/dispatch.ts b/server/src/services/validation/worker/dispatch.ts deleted file mode 100644 index 7963e3df..00000000 --- a/server/src/services/validation/worker/dispatch.ts +++ /dev/null @@ -1,155 +0,0 @@ -import type { - BuildDetails, - BuildJob, - CancelledValidation, - HardhatWorkerCommand, - ValidateCommand, - WorkerState, -} from "../../../types"; -import { convertErrorToMessage } from "./build/convertErrorToMessage"; -import { hardhatBuild } from "./build/hardhatBuild"; -import { clearPreprocessingCacheState } from "./utils/clearPreprocessingCacheState"; - -export function dispatch(workerState: WorkerState) { - return async (command: HardhatWorkerCommand) => { - try { - switch (command.type) { - case "VALIDATE": - return await validate(workerState, command); - case "INVALIDATE_PREPROCESSING_CACHE": - return invalidatePreprocessingCache(workerState); - } - } catch (err: unknown) { - /* istanbul ignore else */ - if (err instanceof Error) { - workerState.logger.error(err.message); - } else { - workerState.logger.error(JSON.stringify(err)); - } - - if (command.type === "VALIDATE") { - try { - const message = convertErrorToMessage(err, command); - - await workerState.send(message); - } catch (innerErr: unknown) { - // log and ignore - /* istanbul ignore else */ - if (err instanceof Error) { - workerState.logger.error(err.message); - } else { - workerState.logger.error(JSON.stringify(err)); - } - } - } - - // clear the state - workerState.current = null; - workerState.buildQueue = []; - workerState.buildJobs = {}; - workerState.compilerMetadataCache = {}; - workerState.previousSolcInput = undefined; - workerState.previousChangedDocAnalysis = undefined; - } - }; -} - -function invalidatePreprocessingCache(workerState: WorkerState) { - workerState.logger.trace(`[WORKER] Preprocessing cache cleared`); - - clearPreprocessingCacheState(workerState); -} - -async function validate(workerState: WorkerState, command: ValidateCommand) { - workerState.logger.trace(`[WORKER] Running validate: ${command.uri}`); - - await recordCommand(workerState, command); - - if (!workerState.buildQueue.includes(command.uri)) { - workerState.buildQueue.push(command.uri); - } - - if (workerState.current !== null) { - if (workerState.current.uri === command.uri) { - workerState.current.status = "cancelled"; - } - - return; - } - - return runNextJob(workerState); -} - -async function recordCommand( - workerState: WorkerState, - command: ValidateCommand -) { - if (command.uri in workerState.buildJobs) { - const previous = workerState.buildJobs[command.uri]; - - const cancelledMessage: CancelledValidation = { - type: "VALIDATION_COMPLETE", - status: "CANCELLED", - jobId: previous.jobId, - projectBasePath: previous.projectBasePath, - }; - - await workerState.send(cancelledMessage); - } - - workerState.buildJobs[command.uri] = { - uri: command.uri, - jobId: command.jobId, - added: new Date(), - projectBasePath: command.projectBasePath, - documentText: command.documentText, - openDocuments: command.openDocuments, - }; -} - -async function runNextJob(workerState: WorkerState): Promise { - const uri = workerState.buildQueue.pop(); - - if (uri === undefined) { - if (Object.values(workerState.buildJobs).length > 0) { - throw new Error("POST CONDITION NOT MET: build jobs not cleared"); - } - - workerState.current = null; - - return; - } - - const lastDetails: BuildDetails | undefined = workerState.buildJobs[uri]; - delete workerState.buildJobs[uri]; - - /* istanbul ignore if */ - if (lastDetails === undefined) { - throw new Error(`No job details in build jobs for ${uri}`); - } - - const buildJob: BuildJob = { - status: "processing", - startTime: new Date(), - context: {}, - - uri: lastDetails.uri, - jobId: lastDetails.jobId, - projectBasePath: lastDetails.projectBasePath, - documentText: lastDetails.documentText, - openDocuments: lastDetails.openDocuments, - added: lastDetails.added, - - fromInputCache: false, - }; - - workerState.current = buildJob; - - const buildResult = await hardhatBuild(workerState, buildJob); - - await workerState.send(buildResult); - - workerState.current = null; - - return runNextJob(workerState); -} diff --git a/server/src/services/validation/worker/initialiseWorkerState.ts b/server/src/services/validation/worker/initialiseWorkerState.ts deleted file mode 100644 index e918a0c5..00000000 --- a/server/src/services/validation/worker/initialiseWorkerState.ts +++ /dev/null @@ -1,82 +0,0 @@ -/* istanbul ignore file: top level node loading */ -import path from "path"; -import type { - InitialisationCompleteMessage, - ValidationCompleteMessage, - WorkerLogger, - WorkerState, -} from "../../../types"; - -export async function initialiseWorkerState( - send: ( - message: InitialisationCompleteMessage | ValidationCompleteMessage - ) => Promise, - logger: WorkerLogger -): Promise { - let hre; - - let hardhatBase = ""; - try { - hardhatBase = path.resolve( - require.resolve("hardhat", { paths: [process.cwd()] }), - "..", - "..", - ".." - ); - - require(`${hardhatBase}/register.js`); - - hre = require(`${hardhatBase}/internal/lib/hardhat-lib.js`); - } catch (err) { - throw new Error(`Unable to initialize Hardhat Runtime Environment`); - } - - const { - TASK_COMPILE_SOLIDITY_GET_SOURCE_PATHS, - TASK_COMPILE_SOLIDITY_GET_SOURCE_NAMES, - TASK_COMPILE_SOLIDITY_GET_DEPENDENCY_GRAPH, - TASK_COMPILE_SOLIDITY_GET_COMPILATION_JOB_FOR_FILE, - TASK_COMPILE_SOLIDITY_GET_COMPILER_INPUT, - TASK_COMPILE_SOLIDITY_GET_SOLC_BUILD, - TASK_COMPILE_SOLIDITY_RUN_SOLCJS, - TASK_COMPILE_SOLIDITY_RUN_SOLC, - TASK_COMPILE_SOLIDITY_READ_FILE, - // eslint-disable-next-line @typescript-eslint/no-var-requires - } = require(`${hardhatBase}/builtin-tasks/task-names`); - - const { - getSolidityFilesCachePath, - SolidityFilesCache, - // eslint-disable-next-line @typescript-eslint/no-var-requires - } = require(`${hardhatBase}/builtin-tasks/utils/solidity-files-cache`); - - const solidityFilesCachePath = getSolidityFilesCachePath(hre.config.paths); - - const originalReadFileAction = - hre.tasks[TASK_COMPILE_SOLIDITY_READ_FILE].action; - - return { - current: null, - buildQueue: [], - buildJobs: {}, - compilerMetadataCache: {}, - - hre, - originalReadFileAction, - solidityFilesCachePath, - SolidityFilesCache, - tasks: { - TASK_COMPILE_SOLIDITY_GET_SOURCE_PATHS, - TASK_COMPILE_SOLIDITY_GET_SOURCE_NAMES, - TASK_COMPILE_SOLIDITY_GET_DEPENDENCY_GRAPH, - TASK_COMPILE_SOLIDITY_GET_COMPILATION_JOB_FOR_FILE, - TASK_COMPILE_SOLIDITY_GET_COMPILER_INPUT, - TASK_COMPILE_SOLIDITY_GET_SOLC_BUILD, - TASK_COMPILE_SOLIDITY_RUN_SOLCJS, - TASK_COMPILE_SOLIDITY_RUN_SOLC, - TASK_COMPILE_SOLIDITY_READ_FILE, - }, - send, - logger, - }; -} diff --git a/server/src/services/validation/worker/setupWorkerLogger.ts b/server/src/services/validation/worker/setupWorkerLogger.ts deleted file mode 100644 index 2182ceab..00000000 --- a/server/src/services/validation/worker/setupWorkerLogger.ts +++ /dev/null @@ -1,13 +0,0 @@ -/* istanbul ignore file: top level dependency injection */ -/* eslint-disable no-console */ -import type { WorkerLogger } from "../../../types"; - -export function setupWorkerLogger(): WorkerLogger { - return { - log: console.log, - error: console.error, - trace: () => { - return null; - }, - }; -} diff --git a/server/src/services/validation/worker/types.ts b/server/src/services/validation/worker/types.ts deleted file mode 100644 index 931b6693..00000000 --- a/server/src/services/validation/worker/types.ts +++ /dev/null @@ -1,2 +0,0 @@ -/* eslint-disable @typescript-eslint/naming-convention */ -/* eslint-disable @typescript-eslint/no-explicit-any */ diff --git a/server/src/services/validation/worker/utils/clearPreprocessingCacheState.ts b/server/src/services/validation/worker/utils/clearPreprocessingCacheState.ts deleted file mode 100644 index bc1e9e57..00000000 --- a/server/src/services/validation/worker/utils/clearPreprocessingCacheState.ts +++ /dev/null @@ -1,6 +0,0 @@ -import { WorkerState } from "../../../../types"; - -export function clearPreprocessingCacheState(workerState: WorkerState) { - workerState.previousSolcInput = undefined; - workerState.previousChangedDocAnalysis = undefined; -} diff --git a/server/src/types.ts b/server/src/types.ts index 8bd33e50..a331f5fb 100644 --- a/server/src/types.ts +++ b/server/src/types.ts @@ -1,57 +1,12 @@ import type { Connection } from "vscode-languageserver"; -import type { Serializable } from "child_process"; import type { TextDocuments } from "vscode-languageserver/node"; import type { TextDocument } from "vscode-languageserver-textdocument"; import type { Logger } from "@utils/Logger"; import type { WorkspaceFolder } from "vscode-languageserver-protocol"; -import type { SolFileIndexMap, SolProjectMap, Diagnostic } from "@common/types"; -import type { HardhatProject } from "@analyzer/HardhatProject"; -import type { HardhatRuntimeEnvironment, SolcBuild } from "hardhat/types"; -import type { AnalysisResult } from "@nomicfoundation/solidity-analyzer"; -import type { SolcInput } from "@services/validation/worker/build/buildInputsToSolc"; +import type { SolFileIndexMap, SolProjectMap } from "@common/types"; import type { Telemetry } from "./telemetry/types"; - -export type CancelResolver = (diagnostics: { - [key: string]: Diagnostic[]; -}) => void; - -export interface CompilerProcess { - init: (document: TextDocument) => { - hardhatConfigFileExistPromise: Promise; - compilerDownloadedPromise: Promise; - solidityCompilePromise: Promise; - }; - - send: (message: Serializable) => void; - kill: () => void; -} - -export type CompilerProcessFactory = ( - project: HardhatProject, - logger: Logger, - connection: Connection -) => WorkerProcess; - -export interface WorkerProcess { - project: HardhatProject; - init: () => void; - validate: (details: { - uri: string; - documentText: string; - projectBasePath: string; - openDocuments: Array<{ - uri: string; - documentText: string; - }>; - }) => Promise; - invalidatePreprocessingCache: () => Promise; - kill: () => void; - restart: () => Promise; -} - -export interface WorkerProcesses { - [key: string]: WorkerProcess; -} +import { BuildInputError } from "./frameworks/base/Errors"; +import { WorkspaceFileRetriever } from "./utils/WorkspaceFileRetriever"; export interface ServerState { env: "production" | "development"; @@ -59,158 +14,26 @@ export interface ServerState { globalTelemetryEnabled: boolean; hardhatTelemetryEnabled: boolean; - indexJobCount: number; - - compProcessFactory: CompilerProcessFactory; connection: Connection; documents: TextDocuments; - workspaceFolders: WorkspaceFolder[]; + indexedWorkspaceFolders: WorkspaceFolder[]; + workspaceFoldersToIndex: WorkspaceFolder[]; projects: SolProjectMap; solFileIndex: SolFileIndexMap; - workerProcesses: WorkerProcesses; telemetry: Telemetry; logger: Logger; -} - -export interface WorkerLogger { - log: (text: string) => void; - error: (text: string) => void; - trace: (text: string) => void; -} - -export interface BuildContext { - sourcePaths?: string[]; - sourceNames?: string[]; - // eslint-disable-next-line @typescript-eslint/no-explicit-any - solidityFilesCache?: any; - // eslint-disable-next-line @typescript-eslint/no-explicit-any - dependencyGraph?: any; - file?: string; - // eslint-disable-next-line @typescript-eslint/no-explicit-any - compilationJob?: any; - // eslint-disable-next-line @typescript-eslint/no-explicit-any - input?: any; - // eslint-disable-next-line @typescript-eslint/no-explicit-any - solcBuild?: any; -} - -export interface BuildJob extends BuildDetails { - status: "processing" | "cancelled"; - context: BuildContext; - startTime: Date; - preprocessingFinished?: Date; - fromInputCache: boolean; -} - -export interface BuildDetails { - uri: string; - jobId: number; - projectBasePath: string; - documentText: string; - openDocuments: Array<{ - uri: string; - documentText: string; - }>; - added: Date; -} - -export interface WorkerState { - current: null | BuildJob; - buildQueue: string[]; - buildJobs: { [key: string]: BuildDetails }; - compilerMetadataCache: { [key: string]: Promise }; - previousChangedDocAnalysis?: { uri: string; analysis: AnalysisResult }; - previousSolcInput?: SolcInput; - - hre: HardhatRuntimeEnvironment; - originalReadFileAction: ( - args: { absolutePath: string }, - hre: HardhatRuntimeEnvironment, - runSuper: () => {} - ) => Promise; - solidityFilesCachePath: string; - // eslint-disable-next-line @typescript-eslint/no-explicit-any - SolidityFilesCache: any; - tasks: { - // eslint-disable-next-line @typescript-eslint/naming-convention - TASK_COMPILE_SOLIDITY_GET_SOURCE_PATHS: string; - // eslint-disable-next-line @typescript-eslint/naming-convention - TASK_COMPILE_SOLIDITY_GET_SOURCE_NAMES: string; - // eslint-disable-next-line @typescript-eslint/naming-convention - TASK_COMPILE_SOLIDITY_GET_DEPENDENCY_GRAPH: string; - // eslint-disable-next-line @typescript-eslint/naming-convention - TASK_COMPILE_SOLIDITY_GET_COMPILATION_JOB_FOR_FILE: string; - // eslint-disable-next-line @typescript-eslint/naming-convention - TASK_COMPILE_SOLIDITY_GET_COMPILER_INPUT: string; - // eslint-disable-next-line @typescript-eslint/naming-convention - TASK_COMPILE_SOLIDITY_GET_SOLC_BUILD: string; - // eslint-disable-next-line @typescript-eslint/naming-convention - TASK_COMPILE_SOLIDITY_RUN_SOLCJS: string; - // eslint-disable-next-line @typescript-eslint/naming-convention - TASK_COMPILE_SOLIDITY_RUN_SOLC: string; - // eslint-disable-next-line @typescript-eslint/naming-convention - TASK_COMPILE_SOLIDITY_READ_FILE: string; - }; - send: ( - message: InitialisationCompleteMessage | ValidationCompleteMessage - ) => Promise; - logger: WorkerLogger; -} - -export interface HardhatImportFileError extends HardhatError { - messageArguments: { - imported: string; - }; -} - -export interface HardhatImportLibraryError extends HardhatError { - messageArguments: { - library: string; - }; -} + solcVersions: string[]; + indexingFinished: boolean; -export interface HardhatSourceImportError extends HardhatError { - messageArguments: { - imported: string; - from: string; - }; + // Associate validation request ids to files to solve parallel validation jobs on the same file + validationCount: number; + lastValidationId: { [uri: string]: number }; + workspaceFileRetriever: WorkspaceFileRetriever; } -export interface HardhatError { - name: "HardhatError"; - errorDescriptor: { - number: number; - message: string; - title: string; - description: string; - shouldBeReported: boolean; - }; - messageArguments?: unknown; -} - -export interface ValidateCommand { - type: "VALIDATE"; - jobId: number; - uri: string; - documentText: string; - projectBasePath: string; - openDocuments: Array<{ - uri: string; - documentText: string; - }>; -} - -export interface InvalidatePreprocessingCacheMessage { - type: "INVALIDATE_PREPROCESSING_CACHE"; -} - -export type HardhatWorkerCommand = - | ValidateCommand - | InvalidatePreprocessingCacheMessage; - -export interface HardhatCompilerError { +export interface SolcError { component: "general"; errorCode: string; formattedMessage: string; @@ -221,27 +44,11 @@ export interface HardhatCompilerError { } /** - * While running the validation job, an error was thrown - * from within hardhat. - */ -export interface HardhatThrownError { - type: "VALIDATION_COMPLETE"; - status: "HARDHAT_ERROR"; - jobId: number; - projectBasePath: string; - hardhatError: HardhatError; -} - -/** - * An error with the background validation thread - * e.g. failed to start or has died. + * Framework provider wasn't able to build input */ -export interface ValidatorError { - type: "VALIDATION_COMPLETE"; - status: "VALIDATOR_ERROR"; - jobId: number; - projectBasePath: string; - reason: string; +export interface BuildInputFailed { + status: "BUILD_INPUT_ERROR"; + error: BuildInputError; } /** @@ -250,31 +57,19 @@ export interface ValidatorError { * the solc compiler etc. */ export interface JobCompletionError { - type: "VALIDATION_COMPLETE"; status: "JOB_COMPLETION_ERROR"; - jobId: number; projectBasePath: string; reason: string; } -export interface UnknownError { - type: "VALIDATION_COMPLETE"; - status: "UNKNOWN_ERROR"; - jobId: number; - projectBasePath: string; - error: unknown; -} - /** * The validation job ran and solc returned warnings/errors */ export interface ValidationFail { - type: "VALIDATION_COMPLETE"; status: "VALIDATION_FAIL"; - jobId: number; projectBasePath: string; version: string; - errors: HardhatCompilerError[]; + errors: SolcError[]; } /** @@ -282,37 +77,17 @@ export interface ValidationFail { * indicating the code would compile. */ export interface ValidationPass { - type: "VALIDATION_COMPLETE"; status: "VALIDATION_PASS"; - jobId: number; projectBasePath: string; version: string; sources: string[]; } -/** - * The validation job was cancelled part way through, - * probably because a new edit came in. - */ -export interface CancelledValidation { - type: "VALIDATION_COMPLETE"; - status: "CANCELLED"; - jobId: number; - projectBasePath: string; -} - -export interface InitialisationCompleteMessage { - type: "INITIALISATION_COMPLETE"; -} - -export type ValidationCompleteMessage = +export type ValidationResult = | ValidationPass | ValidationFail - | HardhatThrownError | JobCompletionError - | ValidatorError - | CancelledValidation - | UnknownError; + | BuildInputFailed; export interface ValidationJobSuccessNotification { validationRun: true; @@ -331,3 +106,5 @@ export interface ValidationJobFailureNotification { export type ValidationJobStatusNotification = | ValidationJobFailureNotification | ValidationJobSuccessNotification; + +export type OpenDocuments = Array<{ uri: string; documentText: string }>; diff --git a/server/src/utils/Logger.ts b/server/src/utils/Logger.ts index 5996fc22..a676f425 100644 --- a/server/src/utils/Logger.ts +++ b/server/src/utils/Logger.ts @@ -9,6 +9,11 @@ export interface Logger { info(arg: string): void; error(err: unknown): void; trace(message: string, verbose?: {} | undefined): void; + trackTime( + description: string, + callback: () => Promise + ): Promise; + tag?: string; } export type ExceptionCapturer = (err: unknown) => void; @@ -17,6 +22,7 @@ export class ConnectionLogger implements Logger { private connection: Connection; private telemetry: Telemetry; private workspaceName: string | null; + public tag?: string; constructor(connection: Connection, telemetry: Telemetry) { this.connection = connection; @@ -65,10 +71,11 @@ export class ConnectionLogger implements Logger { } private _tryPrepend(arg: string) { + const text = this._printTag() + arg; if (this.workspaceName === null) { - return arg; + return text; } else { - return `[LS: ${this.workspaceName}] ${arg}`; + return `[LS: ${this.workspaceName}] ${text}`; } } @@ -81,4 +88,21 @@ export class ConnectionLogger implements Logger { return "errorDescriptor" in err; } + + public async trackTime( + description: string, + callback: () => Promise + ) { + this.trace(`${description}: Start`); + const startTime = new Date().getTime(); + try { + await callback(); + } finally { + this.trace(`${description}: End (${new Date().getTime() - startTime}ms)`); + } + } + + private _printTag() { + return this.tag !== undefined ? `[${this.tag}] ` : ""; + } } diff --git a/server/src/parser/analyzer/WorkspaceFileRetriever.ts b/server/src/utils/WorkspaceFileRetriever.ts similarity index 100% rename from server/src/parser/analyzer/WorkspaceFileRetriever.ts rename to server/src/utils/WorkspaceFileRetriever.ts diff --git a/server/src/utils/applyEditToDocumentAnalyzer.ts b/server/src/utils/applyEditToDocumentAnalyzer.ts index 8dafe233..d7051638 100644 --- a/server/src/utils/applyEditToDocumentAnalyzer.ts +++ b/server/src/utils/applyEditToDocumentAnalyzer.ts @@ -5,11 +5,11 @@ import { ServerState } from "../types"; import { LookupResult } from "./lookupEntryForUri"; import { getUriFromDocument } from "./index"; -export function applyEditToDocumentAnalyzer( +export async function applyEditToDocumentAnalyzer( serverState: ServerState, uri: string, edit: (document: TextDocument) => string -): LookupResult { +): Promise { const document = serverState.documents.get(uri); if (!document) { @@ -23,7 +23,7 @@ export function applyEditToDocumentAnalyzer( const newDocumentText = edit(document); const solFileEntry = getOrInitialiseSolFileEntry(serverState, documentURI); - analyzeSolFile(serverState, solFileEntry, newDocumentText); + await analyzeSolFile(serverState, solFileEntry, newDocumentText); if (!solFileEntry.isAnalyzed()) { return { diff --git a/server/src/utils/directoryContains.ts b/server/src/utils/directoryContains.ts new file mode 100644 index 00000000..1c8ef3ff --- /dev/null +++ b/server/src/utils/directoryContains.ts @@ -0,0 +1,10 @@ +import path from "path"; + +export function directoryContains(dirPath: string, testPath: string): boolean { + const relative = path.relative(dirPath, testPath); + return ( + !!relative && + !relative.startsWith(`..${path.sep}`) && + !path.isAbsolute(relative) + ); +} diff --git a/server/src/utils/findProjectFor.ts b/server/src/utils/findProjectFor.ts deleted file mode 100644 index 78026e26..00000000 --- a/server/src/utils/findProjectFor.ts +++ /dev/null @@ -1,17 +0,0 @@ -import { NoProject } from "@analyzer/NoProject"; -import { ISolProject, SolProjectMap } from "@common/types"; - -const noProj = new NoProject(); - -export function findProjectFor( - { projects }: { projects: SolProjectMap }, - uri: string -): ISolProject { - for (const project of Object.values(projects)) { - if (uri.startsWith(project.basePath)) { - return project; - } - } - - return noProj; -} diff --git a/server/src/utils/getOrInitialiseSolFileEntry.ts b/server/src/utils/getOrInitialiseSolFileEntry.ts index f507d69a..f383bd8c 100644 --- a/server/src/utils/getOrInitialiseSolFileEntry.ts +++ b/server/src/utils/getOrInitialiseSolFileEntry.ts @@ -1,7 +1,9 @@ import * as fs from "fs"; -import { SolFileIndexMap, ISolFileEntry, SolProjectMap } from "@common/types"; -import { findProjectFor } from "@utils/findProjectFor"; +import { ISolFileEntry } from "@common/types"; +import path from "path"; import { SolFileEntry } from "../parser/analyzer/SolFileEntry"; +import { ServerState } from "../types"; +import { ProjectlessProject } from "../frameworks/Projectless/ProjectlessProject"; /** * Get or create a Solidity file entry for the servers file index. @@ -10,27 +12,17 @@ import { SolFileEntry } from "../parser/analyzer/SolFileEntry"; * Uri needs to be decoded and without the "file://" prefix. */ export function getOrInitialiseSolFileEntry( - { - projects, - solFileIndex, - }: { - projects: SolProjectMap; - solFileIndex: SolFileIndexMap; - }, + serverState: ServerState, uri: string ): ISolFileEntry { - let solFileEntry = solFileIndex[uri]; + let solFileEntry = serverState.solFileIndex[uri]; if (solFileEntry === undefined) { - const project = findProjectFor({ projects }, uri); + const project = new ProjectlessProject(serverState, path.dirname(uri)); if (fs.existsSync(uri)) { const docText = fs.readFileSync(uri).toString(); - solFileEntry = SolFileEntry.createLoadedTrackedEntry( - uri, - project, - docText - ); + solFileEntry = SolFileEntry.createLoadedEntry(uri, project, docText); } else { // TODO: figure out what happens if we just don't do this // why bother with non-existant files? Maybe untitled but unsaved @@ -38,7 +30,7 @@ export function getOrInitialiseSolFileEntry( solFileEntry = SolFileEntry.createUnloadedEntry(uri, project); } - solFileIndex[uri] = solFileEntry; + serverState.solFileIndex[uri] = solFileEntry; } return solFileEntry; diff --git a/server/src/utils/index.ts b/server/src/utils/index.ts index 6968df21..199e2dfc 100644 --- a/server/src/utils/index.ts +++ b/server/src/utils/index.ts @@ -1,5 +1,6 @@ import { TextDocument } from "vscode-languageserver-textdocument"; import { TextDocumentIdentifier } from "vscode-languageserver-protocol"; +import path from "path"; import { runningOnWindows } from "./operatingSystem"; export function getUriFromDocument( @@ -49,3 +50,13 @@ export function isCharacterALetter(char: string): boolean { export function isCharacterANumber(char: string): boolean { return /[0-9]/.test(char); } + +export function uriEquals(uri1: string, uri2: string) { + return runningOnWindows() + ? uri1.toLowerCase() === uri2.toLowerCase() + : uri1 === uri2; +} + +export function normalizeSlashes(p: string) { + return path.sep === "\\" ? p.replace(/\\/g, "/") : p; +} diff --git a/server/src/utils/operatingSystem.ts b/server/src/utils/operatingSystem.ts index 898506e5..0537cc3b 100644 --- a/server/src/utils/operatingSystem.ts +++ b/server/src/utils/operatingSystem.ts @@ -1,5 +1,18 @@ +import { exec } from "child_process"; import os from "os"; export function runningOnWindows() { return os.platform() === "win32"; } + +export async function runCmd(cmd: string, cwd?: string): Promise { + return new Promise((resolve, reject) => { + exec(cmd, { cwd }, function (error, stdout) { + if (error !== null) { + reject(error); + } + + resolve(stdout); + }); + }); +} diff --git a/server/src/utils/sleep.ts b/server/src/utils/sleep.ts new file mode 100644 index 00000000..f9a5c4a7 --- /dev/null +++ b/server/src/utils/sleep.ts @@ -0,0 +1,3 @@ +export async function sleep(ms: number): Promise { + return new Promise((resolve) => setTimeout(resolve, ms)); +} diff --git a/server/test/frameworks/hardhat/HardhatProject.test.ts b/server/test/frameworks/hardhat/HardhatProject.test.ts new file mode 100644 index 00000000..061e1737 --- /dev/null +++ b/server/test/frameworks/hardhat/HardhatProject.test.ts @@ -0,0 +1,74 @@ +/* eslint-disable @typescript-eslint/no-explicit-any */ +import { assert } from "chai"; +import { ChildProcess } from "child_process"; +import { + HardhatProject, + WorkerStatus, +} from "../../../src/frameworks/Hardhat/HardhatProject"; +import { FileBelongsRequest } from "../../../src/frameworks/Hardhat/worker/WorkerProtocol"; +import { ServerState } from "../../../src/types"; + +describe("HardhatProject", function () { + let project: HardhatProject; + const workerProcessMock = {} as ChildProcess; + const serverStateMock = { + logger: {}, + } as ServerState; + + beforeEach(async () => { + project = new HardhatProject( + serverStateMock, + "/my_hardhat_project", + "/my_hardhat_project/hardhat.config.ts" + ); + + project.workerProcess = workerProcessMock; + project.workerStatus = WorkerStatus.RUNNING; + }); + + describe("fileBelongs", function () { + describe("when initialization was correct and worker is operative", function () { + it("sends a FileBelongsRequest to the worker process", async () => { + // Make the worker respond "true" + (workerProcessMock as any).send = (request: FileBelongsRequest) => { + (project as any)._handleResponse(request.requestId, true); + }; + + assert.isTrue( + await project.fileBelongs( + `/my_hardhat_project/any_folder/contract.sol` + ) + ); + + // Make the worker respond "false" + (workerProcessMock as any).send = (request: FileBelongsRequest) => { + (project as any)._handleResponse(request.requestId, false); + }; + + assert.isFalse( + await project.fileBelongs( + `/my_hardhat_project/any_folder/contract.sol` + ) + ); + }); + }); + + describe("when worker is not operative", function () { + beforeEach(async () => { + project.workerStatus = WorkerStatus.ERRORED; + }); + + it("claims every contract under project basePath, to avoid it being assigned other project", async () => { + assert.isTrue( + await project.fileBelongs( + `/my_hardhat_project/any_folder/contract.sol` + ) + ); + + assert.isFalse( + await project.fileBelongs("/other_project/any_folder/contract.sol") + ); + }); + }); + }); +}); diff --git a/server/test/helpers/setupMockCompilerProcessFactory.ts b/server/test/helpers/setupMockCompilerProcessFactory.ts deleted file mode 100644 index 4d9bfdad..00000000 --- a/server/test/helpers/setupMockCompilerProcessFactory.ts +++ /dev/null @@ -1,38 +0,0 @@ -import { HardhatProject } from "@analyzer/HardhatProject"; -import * as sinon from "sinon"; -import { - CompilerProcessFactory, - HardhatCompilerError, - ValidationFail, - WorkerProcess, -} from "../../src/types"; - -export function setupMockCompilerProcessFactory( - errors: HardhatCompilerError[] = [] -): CompilerProcessFactory { - return (project: HardhatProject): WorkerProcess => { - return { - project, - init: sinon.spy(), - validate: sinon.spy(() => { - const validationMessage: ValidationFail = { - type: "VALIDATION_COMPLETE", - status: "VALIDATION_FAIL", - jobId: 1, - projectBasePath: project.basePath, - version: "9.9.9", - errors, - }; - - return validationMessage; - // eslint-disable-next-line @typescript-eslint/no-explicit-any - }) as any, - invalidatePreprocessingCache: sinon.spy(() => { - return true; - // eslint-disable-next-line @typescript-eslint/no-explicit-any - }) as any, - kill: sinon.spy(), - restart: sinon.spy(), - } as WorkerProcess; - }; -} diff --git a/server/test/helpers/setupMockLanguageServer.ts b/server/test/helpers/setupMockLanguageServer.ts index 40355f52..d881f75a 100644 --- a/server/test/helpers/setupMockLanguageServer.ts +++ b/server/test/helpers/setupMockLanguageServer.ts @@ -22,12 +22,8 @@ import { } from "vscode-languageserver/node"; import { getOrInitialiseSolFileEntry } from "@utils/getOrInitialiseSolFileEntry"; import { getUriFromDocument } from "../../src/utils/index"; -import setupServer, { - GetSolFileDetailsParams, - GetSolFileDetailsResponse, -} from "../../src/server"; -import type { HardhatCompilerError } from "../../src/types"; -import { setupMockCompilerProcessFactory } from "./setupMockCompilerProcessFactory"; +import setupServer from "../../src/server"; +import type { SolcError } from "../../src/types"; import { setupMockConnection } from "./setupMockConnection"; import { waitUntil } from "./waitUntil"; import { setupMockLogger } from "./setupMockLogger"; @@ -57,24 +53,19 @@ export type OnRenameRequest = ( params: RenameParams ) => WorkspaceEdit | undefined | null; export type OnHover = (params: HoverParams) => Hover | null; -export type OnRequest = ( - params: GetSolFileDetailsParams -) => GetSolFileDetailsResponse; export async function setupMockLanguageServer({ projects, documents, - errors, }: { projects?: { [key: string]: string[] }; documents: Array<{ uri: string; content?: string; analyze: boolean }>; - errors: HardhatCompilerError[]; + errors: SolcError[]; }) { const exampleRootUri = forceToUnixStyle(path.join(__dirname, "..")); const exampleWorkspaceFolders = [{ name: "example", uri: exampleRootUri }]; const mockConnection = setupMockConnection(); - const mockCompilerProcessFactory = setupMockCompilerProcessFactory(errors); const mockWorkspaceFileRetriever = setupMockWorkspaceFileRetriever( projects ?? {} ); @@ -84,7 +75,6 @@ export async function setupMockLanguageServer({ const serverState = await setupServer( // eslint-disable-next-line @typescript-eslint/no-explicit-any mockConnection as any, - mockCompilerProcessFactory, mockWorkspaceFileRetriever, mockTelemetry, mockLogger @@ -122,7 +112,6 @@ export async function setupMockLanguageServer({ const renameRequest: OnRenameRequest = mockConnection.onRenameRequest.getCall(0).firstArg; const hover: OnHover = mockConnection.onHover.getCall(0).firstArg; - const request: OnRequest = mockConnection.onRequest.getCall(0).args[1]; const didOpenTextDocument = mockConnection.onDidOpenTextDocument.getCall(0).firstArg; @@ -183,7 +172,6 @@ export async function setupMockLanguageServer({ implementation, renameRequest, hover, - request, }, }; } diff --git a/server/test/helpers/setupMockLogger.ts b/server/test/helpers/setupMockLogger.ts index 456c21f4..94ee0137 100644 --- a/server/test/helpers/setupMockLogger.ts +++ b/server/test/helpers/setupMockLogger.ts @@ -8,5 +8,6 @@ export function setupMockLogger(): Logger { info: sinon.spy(), error: sinon.spy(), trace: sinon.spy(), + trackTime: sinon.spy(), }; } diff --git a/server/test/helpers/setupMockWorkspaceFileRetriever.ts b/server/test/helpers/setupMockWorkspaceFileRetriever.ts index b1ac300b..76206337 100644 --- a/server/test/helpers/setupMockWorkspaceFileRetriever.ts +++ b/server/test/helpers/setupMockWorkspaceFileRetriever.ts @@ -1,6 +1,6 @@ -import { WorkspaceFileRetriever } from "@analyzer/WorkspaceFileRetriever"; import { decodeUriAndRemoveFilePrefix } from "@utils/index"; import sinon from "sinon"; +import { WorkspaceFileRetriever } from "../../src/utils/WorkspaceFileRetriever"; export function setupMockWorkspaceFileRetriever( projects: { [key: string]: string[] } = {}, diff --git a/server/test/parser/analyzer.ts b/server/test/parser/analyzer.ts index 2a7e3d40..eea77c06 100644 --- a/server/test/parser/analyzer.ts +++ b/server/test/parser/analyzer.ts @@ -1,16 +1,16 @@ +/* eslint-disable @typescript-eslint/no-explicit-any */ import * as path from "path"; import { assert } from "chai"; import { IndexFileData } from "@common/event"; import { indexWorkspaceFolders } from "@services/initialization/indexWorkspaceFolders"; import { Connection } from "vscode-languageserver"; -import { HardhatProject } from "@analyzer/HardhatProject"; import { forceToUnixStyle } from "../helpers/forceToUnixStyle"; import { setupMockLogger } from "../helpers/setupMockLogger"; describe("Analyzer", () => { describe("indexing", () => { const exampleRootPath = forceToUnixStyle(__dirname); - let collectedData: Array<[string, IndexFileData]>; + let collectedData: Array<[string, IndexFileData | undefined]>; let foundSolFiles: string[]; describe("with multiple files", () => { @@ -21,45 +21,11 @@ describe("Analyzer", () => { await runIndexing(exampleRootPath, foundSolFiles, collectedData); }); - it("should emit an indexing event for each", () => { - assert.equal(collectedData.length, 4); + it("should emit an indexing-start event", () => { + assert.equal(collectedData.length, 2); assert.deepEqual(collectedData, [ - [ - "custom/indexing-start", - { - jobId: 1, - path: "", - current: 0, - total: 0, - }, - ], - [ - "custom/indexing-file", - { - jobId: 1, - path: forceToUnixStyle(path.join(__dirname, "example1.sol")), - current: 1, - total: 3, - }, - ], - [ - "custom/indexing-file", - { - jobId: 1, - path: forceToUnixStyle(path.join(__dirname, "example2.sol")), - current: 2, - total: 3, - }, - ], - [ - "custom/indexing-file", - { - jobId: 1, - path: forceToUnixStyle(path.join(__dirname, "example3.sol")), - current: 3, - total: 3, - }, - ], + ["custom/indexing-start", undefined], + ["custom/indexing-end", undefined], ]); }); }); @@ -72,27 +38,11 @@ describe("Analyzer", () => { await runIndexing(exampleRootPath, foundSolFiles, collectedData); }); - it("should emit an indexing event for each", () => { + it("should emit an indexing-start event", () => { assert.equal(collectedData.length, 2); assert.deepEqual(collectedData, [ - [ - "custom/indexing-start", - { - jobId: 1, - path: "", - current: 0, - total: 0, - }, - ], - [ - "custom/indexing-file", - { - jobId: 1, - path: "", - current: 0, - total: 0, - }, - ], + ["custom/indexing-start", undefined], + ["custom/indexing-end", undefined], ]); }); }); @@ -102,17 +52,11 @@ describe("Analyzer", () => { async function runIndexing( rootPath: string, foundSolFiles: string[], - collectedData: Array<[string, IndexFileData]> + collectedData: Array<[string, IndexFileData | undefined]> ) { const exampleWorkspaceFolder = { name: "example", uri: rootPath }; - const exampleProjects = { - [rootPath]: new HardhatProject( - exampleWorkspaceFolder.uri, - path.join(exampleWorkspaceFolder.uri, "hardhat.config.ts"), - exampleWorkspaceFolder - ), - }; + const exampleProjects = {}; const solFileIndex = {}; const mockLogger = setupMockLogger(); @@ -146,8 +90,8 @@ async function runIndexing( solFileIndex, projects: exampleProjects, logger: mockLogger, - workspaceFolders: [], - }, + indexedWorkspaceFolders: [], + } as any, mockWorkspaceFileRetriever, [exampleWorkspaceFolder] ); diff --git a/server/test/server.ts b/server/test/server.ts index b380a7c6..0896783e 100644 --- a/server/test/server.ts +++ b/server/test/server.ts @@ -1,6 +1,5 @@ import assert from "assert"; import setupServer from "../src/server"; -import { setupMockCompilerProcessFactory } from "./helpers/setupMockCompilerProcessFactory"; import { setupMockConnection } from "./helpers/setupMockConnection"; import { setupMockLogger } from "./helpers/setupMockLogger"; import { setupMockTelemetry } from "./helpers/setupMockTelemetry"; @@ -15,7 +14,6 @@ describe("Solidity Language Server", () => { before(async () => { mockConnection = setupMockConnection(); - const mockCompilerProcessFactory = setupMockCompilerProcessFactory(); const mockLogger = setupMockLogger(); const mockWorkspaceFileRetriever = setupMockWorkspaceFileRetriever(); const mockTelemetry = setupMockTelemetry(); @@ -23,7 +21,6 @@ describe("Solidity Language Server", () => { await setupServer( // eslint-disable-next-line @typescript-eslint/no-explicit-any mockConnection as any, - mockCompilerProcessFactory, mockWorkspaceFileRetriever, mockTelemetry, mockLogger @@ -33,7 +30,7 @@ describe("Solidity Language Server", () => { const initialize = mockConnection.onInitialize.getCall(0).firstArg; assert(initialize); - const initializeResponse = initialize({ + const initializeResponse = await initialize({ rootUri: null, capabilities: {}, }); diff --git a/server/test/services/codeactions/asserts/assertCodeAction.ts b/server/test/services/codeactions/asserts/assertCodeAction.ts index 16cfb4f4..6265e032 100644 --- a/server/test/services/codeactions/asserts/assertCodeAction.ts +++ b/server/test/services/codeactions/asserts/assertCodeAction.ts @@ -33,16 +33,16 @@ export async function assertCodeAction( const serverState = { indexJobCount: 0, - workspaceFolders: [] as WorkspaceFolder[], + indexedWorkspaceFolders: [] as WorkspaceFolder[], projects: {}, connection: mockConnection, solFileIndex: {}, logger: mockLogger, - }; + } as unknown as ServerState; const solFileEntry = getOrInitialiseSolFileEntry(serverState, exampleUri); - analyzeSolFile(serverState, solFileEntry, docText); + await analyzeSolFile(serverState, solFileEntry, docText); const actions = compilerDiagnostic.resolveActions( serverState as ServerState, diff --git a/server/test/services/codeactions/markContractAbstract.ts b/server/test/services/codeactions/markContractAbstract.ts index 52890f44..90d0716e 100644 --- a/server/test/services/codeactions/markContractAbstract.ts +++ b/server/test/services/codeactions/markContractAbstract.ts @@ -144,17 +144,17 @@ describe("Code Actions", () => { const serverState = { indexJobCount: 0, - workspaceFolders: [{ name: "example", uri: exampleUri }], + indexedWorkspaceFolders: [{ name: "example", uri: exampleUri }], projects: {}, connection: mockConnection, solFileIndex: {}, logger: mockLogger, - }; + } as unknown as ServerState; await indexWorkspaceFolders( serverState, mockWorkspaceFileRetriever, - serverState.workspaceFolders + serverState.indexedWorkspaceFolders ); const actions = markContractAbstract.resolveActions( diff --git a/server/test/services/completion/imports.ts b/server/test/services/completion/imports.ts index dbee9aa5..0c801b40 100644 --- a/server/test/services/completion/imports.ts +++ b/server/test/services/completion/imports.ts @@ -98,42 +98,6 @@ describe("Parser", () => { errors: [], })); }); - - it("should list sol files under a package folder in node_modules", () => - assertImportCompletion( - completion, - importsUri, - { line: 3, character: 8 }, - [ - { - label: "./Second.sol", - insertText: "./Second.sol", - kind: CompletionItemKind.File, - }, - { - label: "./Third.sol", - insertText: "./Third.sol", - kind: CompletionItemKind.File, - }, - { - label: "./sub", - insertText: "./sub", - kind: CompletionItemKind.Folder, - }, - { - label: "@openzeppelin", - kind: CompletionItemKind.Folder, - }, - { - label: "@ens", - kind: CompletionItemKind.Folder, - }, - ], - { - triggerKind: 2, - triggerCharacter: "/", - } - )); }); describe("relative", () => { @@ -625,148 +589,6 @@ describe("Parser", () => { errors: [], })); }); - - it("should list sol files under a package folder in node_modules", () => - assertImportCompletion( - completion, - importsUri, - { line: 13, character: 22 }, - [ - { - label: - "@openzeppelin/contracts/token/ERC1155/presets/ERC1155PresetMinterPauser.sol", - kind: CompletionItemKind.Module, - textEdit: { - newText: - "@openzeppelin/contracts/token/ERC1155/presets/ERC1155PresetMinterPauser.sol", - range: { - start: { - line: 13, - character: 8, - }, - end: { - line: 13, - character: 22, - }, - }, - }, - }, - { - label: "@openzeppelin/contracts/token/ERC20/IERC20.sol", - kind: CompletionItemKind.Module, - textEdit: { - newText: "@openzeppelin/contracts/token/ERC20/IERC20.sol", - range: { - start: { - line: 13, - character: 8, - }, - end: { - line: 13, - character: 22, - }, - }, - }, - }, - ], - { - triggerKind: 2, - triggerCharacter: "/", - } - )); - - it("should list sol files under a package folder in node_modules when partially complete", () => - assertImportCompletion( - completion, - importsUri, - { line: 14, character: 12 }, - [ - { - label: "@openzeppelin", - kind: CompletionItemKind.Module, - textEdit: { - newText: "@openzeppelin", - range: { - start: { - line: 14, - character: 8, - }, - end: { - line: 14, - character: 12, - }, - }, - }, - }, - ], - { - triggerKind: 1, - } - )); - - it("should list sol files under a package subfolder in node_modules", () => - assertImportCompletion( - completion, - importsUri, - { line: 15, character: 54 }, - [ - { - label: - "@openzeppelin/contracts/token/ERC1155/presets/ERC1155PresetMinterPauser.sol", - kind: CompletionItemKind.Module, - textEdit: { - newText: - "@openzeppelin/contracts/token/ERC1155/presets/ERC1155PresetMinterPauser.sol", - range: { - start: { - line: 15, - character: 8, - }, - end: { - line: 15, - character: 54, - }, - }, - }, - }, - ], - { - triggerKind: 2, - triggerCharacter: "/", - } - )); - - it("should list sol files under a package subfolder in node_modules when partially complete", () => - assertImportCompletion( - completion, - importsUri, - { line: 16, character: 49 }, - [ - { - label: - "@openzeppelin/contracts/token/ERC1155/presets/ERC1155PresetMinterPauser.sol", - kind: CompletionItemKind.Module, - textEdit: { - newText: - "@openzeppelin/contracts/token/ERC1155/presets/ERC1155PresetMinterPauser.sol", - range: { - start: { - line: 16, - character: 8, - }, - end: { - line: 16, - character: 49, - }, - }, - }, - }, - ], - { - triggerKind: 2, - triggerCharacter: "/", - } - )); }); }); }); diff --git a/server/test/services/documentation/documentation.ts b/server/test/services/documentation/documentation.ts index 5223da5f..b554125f 100644 --- a/server/test/services/documentation/documentation.ts +++ b/server/test/services/documentation/documentation.ts @@ -1,6 +1,7 @@ import { assert } from "chai"; import * as path from "path"; import { SignatureHelp } from "vscode-languageserver/node"; +import { sleep } from "../../../src/utils/sleep"; import { forceToUnixStyle } from "../../helpers/forceToUnixStyle"; import { setupMockLanguageServer, @@ -24,6 +25,7 @@ describe("Parser", () => { }); it("should return signature info", async () => { + await sleep(500); const response = (await signatureHelp({ textDocument: { uri: basicUri }, position: { line: 21, character: 21 }, diff --git a/server/test/services/documents/onDidChangeWatchedFiles.ts b/server/test/services/documents/onDidChangeWatchedFiles.ts index a358415e..14919e6a 100644 --- a/server/test/services/documents/onDidChangeWatchedFiles.ts +++ b/server/test/services/documents/onDidChangeWatchedFiles.ts @@ -1,218 +1,15 @@ -import { NoProject } from "@analyzer/NoProject"; -import { ClientTrackingState, ISolFileEntry } from "@common/types"; -import { onDidChangeWatchedFiles } from "@services/documents/onDidChangeWatchedFiles"; -import { assert } from "chai"; import sinon from "sinon"; -import { FileChangeType } from "vscode-languageserver-protocol"; import { ServerState } from "../../../src/types"; import { setupMockLogger } from "../../helpers/setupMockLogger"; import { setupMockTelemetry } from "../../helpers/setupMockTelemetry"; describe("On did change watched files", () => { - describe("change to hardhat config file", () => { - it("should restart the worker", async () => { - const mockWorkerProcess = { - restart: sinon.spy(), - }; - - const serverState = setupServerState(mockWorkerProcess); - - const [response] = await onDidChangeWatchedFiles(serverState)({ - changes: [{ type: 1, uri: "/projects/example/hardhat.config.ts" }], - }); - - assert.deepStrictEqual(response, true); - assert(mockWorkerProcess.restart.called); - }); - - it("should gracefully fail if no project for config file", async () => { - const serverState = setupServerState(); - serverState.projects = {}; - - const [response] = await onDidChangeWatchedFiles(serverState)({ - changes: [{ type: 1, uri: "/projects/js-example/hardhat.config.js" }], - }); - - assert.deepStrictEqual(response, false); - }); - - it("should gracefully fail if no worker process for config file", async () => { - const serverState = setupServerState(); - serverState.workerProcesses = {}; - - const [response] = await onDidChangeWatchedFiles(serverState)({ - changes: [{ type: 1, uri: "/projects/example/hardhat.config.ts" }], - }); - - assert.deepStrictEqual(response, false); - }); - - it("should gracefully fail on an unexpected exception", async () => { - const serverState = setupServerState(); - // eslint-disable-next-line @typescript-eslint/no-explicit-any - serverState.projects["/projects/example"] = undefined as any; - - const [response] = await onDidChangeWatchedFiles(serverState)({ - changes: [{ type: 1, uri: "/projects/example/hardhat.config.ts" }], - }); - - assert.deepStrictEqual(response, false); - }); - }); - - describe("change to solidity file", () => { - let exampleSolFileEntry: ISolFileEntry; - - beforeEach(() => { - exampleSolFileEntry = { - project: { - type: "hardhat", - configPath: "/projects/example/hardhat.config.ts", - basePath: "/projects/example", - }, - tracking: ClientTrackingState.UNTRACKED, - // eslint-disable-next-line @typescript-eslint/no-explicit-any - } as any; - }); - - describe("that is untracked", () => { - it("should invalidate the preprocessing cache on the worker", async () => { - const mockWorkerProcess = { - invalidatePreprocessingCache: sinon.spy(() => true), - }; - - const serverState = setupServerState(mockWorkerProcess); - - serverState.solFileIndex = { - "/projects/example/contracts/a-solidity-file.sol": - exampleSolFileEntry, - }; - - const [response] = await onDidChangeWatchedFiles(serverState)({ - changes: [ - { - type: FileChangeType.Changed, - uri: "/projects/example/contracts/a-solidity-file.sol", - }, - ], - }); - - assert.deepStrictEqual(response, true); - assert(mockWorkerProcess.invalidatePreprocessingCache.called); - }); - - it("should gracefully fail if no entry for the uri", async () => { - const serverState = setupServerState(); - - serverState.solFileIndex = { - // eslint-disable-next-line @typescript-eslint/no-explicit-any - "/projects/example/contracts/a-solidity-file.sol": undefined as any, - }; - - serverState.workerProcesses = {}; - - const [response] = await onDidChangeWatchedFiles(serverState)({ - changes: [ - { type: 1, uri: "/projects/example/contracts/a-solidity-file.sol" }, - ], - }); - - assert.deepStrictEqual(response, false); - }); - - it("should gracefully fail if not in a hardhat project", async () => { - const serverState = setupServerState(); - - serverState.solFileIndex = { - "/projects/example/contracts/a-solidity-file.sol": { - ...exampleSolFileEntry, - project: new NoProject(), - }, - }; - - serverState.workerProcesses = {}; - - const [response] = await onDidChangeWatchedFiles(serverState)({ - changes: [ - { type: 1, uri: "/projects/example/contracts/a-solidity-file.sol" }, - ], - }); - - assert.deepStrictEqual(response, false); - }); - - it("should gracefully fail if no worker process for config file", async () => { - const serverState = setupServerState(); - - serverState.solFileIndex = { - "/projects/example/contracts/a-solidity-file.sol": - exampleSolFileEntry, - }; - - serverState.workerProcesses = {}; - - const [response] = await onDidChangeWatchedFiles(serverState)({ - changes: [ - { type: 1, uri: "/projects/example/contracts/a-solidity-file.sol" }, - ], - }); - - assert.deepStrictEqual(response, false); - }); - - it("should gracefully fail on an unexpected exception", async () => { - const serverState = setupServerState(); - - serverState.solFileIndex = { - "/projects/example/contracts/a-solidity-file.sol": - exampleSolFileEntry, - }; - - // eslint-disable-next-line @typescript-eslint/no-explicit-any - serverState.projects["/projects/example"] = undefined as any; - - const [response] = await onDidChangeWatchedFiles(serverState)({ - changes: [ - { type: 1, uri: "/projects/example/contracts/a-solidity-file.sol" }, - ], - }); - - assert.deepStrictEqual(response, false); - }); - }); - - describe("that is tracked", () => { - it("should be ignored", async () => { - const mockWorkerProcess = { - invalidatePreprocessingCache: sinon.spy(() => true), - }; - - const serverState = setupServerState(mockWorkerProcess); - - serverState.solFileIndex = { - "/projects/example/contracts/a-solidity-file.sol": { - ...exampleSolFileEntry, - tracking: ClientTrackingState.TRACKED, - }, - }; - - const [response] = await onDidChangeWatchedFiles(serverState)({ - changes: [ - { - type: FileChangeType.Changed, - uri: "/projects/example/contracts/a-solidity-file.sol", - }, - ], - }); - - assert.deepStrictEqual(response, false); - assert(mockWorkerProcess.invalidatePreprocessingCache.notCalled); - }); - }); + it("should call projects callback", async () => { + // TODO }); }); -function setupServerState(mockWorkerProcess?: { +function _setupServerState(mockWorkerProcess?: { restart?: () => void; invalidatePreprocessingCache?: () => void; }): ServerState { diff --git a/server/test/services/documents/tracking.ts b/server/test/services/documents/tracking.ts deleted file mode 100644 index 04009905..00000000 --- a/server/test/services/documents/tracking.ts +++ /dev/null @@ -1,209 +0,0 @@ -import { assert } from "chai"; -import { onDidOpen } from "@services/documents/onDidOpen"; -import { TextDocument } from "vscode-languageserver-textdocument"; -import { TextDocuments } from "vscode-languageserver/node"; -import { ClientTrackingState } from "@common/types"; -import { onDidChangeContent } from "@services/documents/onDidChangeContent"; -import { onDidClose } from "@services/documents/onDidClose"; -import { onDidSave } from "@services/documents/onDidSave"; -import { ServerState } from "../../../src/types"; -import { setupMockCompilerProcessFactory } from "../../helpers/setupMockCompilerProcessFactory"; -import { setupMockConnection } from "../../helpers/setupMockConnection"; -import { setupMockLogger } from "../../helpers/setupMockLogger"; -import { setupMockTelemetry } from "../../helpers/setupMockTelemetry"; - -describe("documents", () => { - describe("tracking", () => { - describe("on open", () => { - it("sets tracking as on", () => { - const serverState: ServerState = setupServerState(); - - const change = { - document: TextDocument.create("/example/file.sol", "solidity", 0, ""), - }; - - onDidOpen(serverState)(change); - - const fileEntry = serverState.solFileIndex["/example/file.sol"]; - - assert.equal(fileEntry.tracking, ClientTrackingState.TRACKED); - }); - - it("ignores non-solidity files", () => { - const serverState: ServerState = setupServerState(); - - const change = { - document: TextDocument.create( - "/example/not-solidity.js", - "javascript", - 0, - "// ignore" - ), - }; - - onDidOpen(serverState)(change); - - const fileEntry = serverState.solFileIndex["/example/not-solidity.js"]; - - assert.isUndefined(fileEntry); - }); - }); - - describe("on change", () => { - it("sets tracking as on", () => { - const serverState: ServerState = setupServerState(); - - const change = { - document: TextDocument.create("/example/file.sol", "solidity", 0, ""), - }; - - onDidOpen(serverState)(change); - onDidChangeContent(serverState)(change); - - const fileEntry = serverState.solFileIndex["/example/file.sol"]; - - assert.equal(fileEntry.tracking, ClientTrackingState.TRACKED); - }); - - it("logs and ignores on error", () => { - const serverState: ServerState = setupServerState(); - - serverState.logger.trace = () => { - throw new Error("Unexpected"); - }; - - const change = { - document: TextDocument.create("/example/file.sol", "solidity", 0, ""), - }; - - onDidChangeContent(serverState)(change); - - assert( - // eslint-disable-next-line @typescript-eslint/no-explicit-any - (serverState.logger.error as any).calledOnce, - "error was not logged" - ); - }); - - it("ignores non-solidity files", () => { - const serverState: ServerState = setupServerState(); - - const change = { - document: TextDocument.create( - "/example/not-solidity.js", - "javascript", - 0, - "// ignore" - ), - }; - - onDidChangeContent(serverState)(change); - - const fileEntry = serverState.solFileIndex["/example/not-solidity.js"]; - - assert.isUndefined(fileEntry); - }); - }); - - describe("on close", () => { - it("sets tracking as on", () => { - const serverState: ServerState = setupServerState(); - - const change = { - document: TextDocument.create("/example/file.sol", "solidity", 0, ""), - }; - - onDidOpen(serverState)(change); - onDidChangeContent(serverState)(change); - onDidClose(serverState)(change); - - const fileEntry = serverState.solFileIndex["/example/file.sol"]; - - assert.equal(fileEntry.tracking, ClientTrackingState.UNTRACKED); - }); - - it("ignores non-solidity files", () => { - const serverState: ServerState = setupServerState(); - - const change = { - document: TextDocument.create( - "/example/not-solidity.js", - "javascript", - 0, - "// ignore" - ), - }; - - onDidClose(serverState)(change); - - const fileEntry = serverState.solFileIndex["/example/not-solidity.js"]; - - assert.isUndefined(fileEntry); - }); - }); - - describe("on save", () => { - it("sets tracking as on", () => { - const serverState: ServerState = setupServerState(); - - const change = { - document: TextDocument.create("/example/file.sol", "solidity", 0, ""), - }; - - onDidSave(serverState)(change); - - const fileEntry = serverState.solFileIndex["/example/file.sol"]; - - assert.equal(fileEntry.tracking, ClientTrackingState.TRACKED); - }); - - it("ignores non-solidity files", () => { - const serverState: ServerState = setupServerState(); - - const change = { - document: TextDocument.create( - "/example/not-solidity.js", - "javascript", - 0, - "// ignore" - ), - }; - - onDidSave(serverState)(change); - - const fileEntry = serverState.solFileIndex["/example/not-solidity.js"]; - - assert.isUndefined(fileEntry); - }); - }); - }); -}); - -function setupServerState(): ServerState { - const mockConnection = setupMockConnection(); - const mockTelemetry = setupMockTelemetry(); - const compProcessFactory = setupMockCompilerProcessFactory(); - const logger = setupMockLogger(); - - return { - env: "production", - hasWorkspaceFolderCapability: true, - - globalTelemetryEnabled: false, - hardhatTelemetryEnabled: false, - indexJobCount: 0, - - compProcessFactory, - - // eslint-disable-next-line @typescript-eslint/no-explicit-any - connection: mockConnection as any, - documents: new TextDocuments(TextDocument), - workspaceFolders: [], - projects: {}, - solFileIndex: {}, - workerProcesses: {}, - - telemetry: mockTelemetry, - logger, - }; -} diff --git a/server/test/services/initialization/indexWorkspaceFolders.ts b/server/test/services/initialization/indexWorkspaceFolders.ts index 8e474e2e..1f42cce7 100644 --- a/server/test/services/initialization/indexWorkspaceFolders.ts +++ b/server/test/services/initialization/indexWorkspaceFolders.ts @@ -1,662 +1,676 @@ -/* eslint-disable @typescript-eslint/no-explicit-any */ -import { WorkspaceFileRetriever } from "@analyzer/WorkspaceFileRetriever"; -import { - indexWorkspaceFolders, - IndexWorkspaceFoldersContext, -} from "@services/initialization/indexWorkspaceFolders"; -import { assert } from "chai"; -import * as sinon from "sinon"; -import { WorkspaceFolder } from "vscode-languageserver"; -import { setupMockConnection } from "../../helpers/setupMockConnection"; -import { setupMockLogger } from "../../helpers/setupMockLogger"; -import { setupMockWorkspaceFileRetriever } from "../../helpers/setupMockWorkspaceFileRetriever"; - -describe("initialization", () => { - describe("indexing workspace folders", () => { - describe("adding single workspace with projects and sol files", () => { - let serverState: IndexWorkspaceFoldersContext; - let addedFolders: WorkspaceFolder[]; - - before(async () => { - addedFolders = [ - { - name: "example", - uri: "file:///data/example", - }, - ]; - - const mockWorkspaceFileRetriever = setupMockWorkspaceFileRetriever( - { - "/data/example": ["/data/example/hardhat.config.ts"], - }, - { - "/data/example": ["/data/example/contracts/one.sol"], - } - ); - - serverState = buildServerState({ existingFolders: [] }); - - await indexWorkspaceFolders( - serverState, - mockWorkspaceFileRetriever, - addedFolders - ); - }); - - it("should add a new workspace folder ", () => { - assert.deepStrictEqual(serverState.workspaceFolders, addedFolders); - }); - - it("should add projects", () => { - assert.deepStrictEqual(serverState.projects, { - "/data/example": { - basePath: "/data/example", - configPath: "/data/example/hardhat.config.ts", - type: "hardhat", - remappings: [], - workspaceFolder: { - name: "example", - uri: "file:///data/example", - }, - }, - }); - }); - - it("should add solidity files", () => { - assert("/data/example/contracts/one.sol" in serverState.solFileIndex); - }); - - it("should notify the client of indexing starting", () => { - sinon.assert.calledWithExactly( - serverState.connection.sendNotification as any, - "custom/indexing-start", - { - jobId: 1, - path: "", - current: 0, - total: 0, - } - ); - }); - - it("should notify the client of indexing finishing", () => { - sinon.assert.calledWithExactly( - serverState.connection.sendNotification as any, - "custom/indexing-file", - { - jobId: 1, - path: "/data/example/contracts/one.sol", - current: 1, - total: 1, - } - ); - }); - }); - - describe("adding single workspace with multiple projects", () => { - let serverState: IndexWorkspaceFoldersContext; - let addedFolders: WorkspaceFolder[]; - - before(async () => { - addedFolders = [ - { - name: "example", - uri: "file:///data/example", - }, - ]; - - const mockWorkspaceFileRetriever = setupMockWorkspaceFileRetriever( - { - "/data/example": [ - "/data/example/packages/first/hardhat.config.ts", - "/data/example/packages/second/hardhat.config.ts", - ], - }, - { - "/data/example": [ - "/data/example/packages/first/contracts/A.sol", - "/data/example/packages/first/contracts/B.sol", - "/data/example/packages/second/contracts/C.sol", - "/data/example/packages/second/contracts/D.sol", - ], - } - ); - - serverState = buildServerState({ existingFolders: [] }); - - await indexWorkspaceFolders( - serverState, - mockWorkspaceFileRetriever, - addedFolders - ); - }); - - it("should add a new workspace folder ", () => { - assert.deepStrictEqual(serverState.workspaceFolders, addedFolders); - }); - - it("should add multiple projects", () => { - assert.deepStrictEqual(serverState.projects, { - "/data/example/packages/first": { - basePath: "/data/example/packages/first", - configPath: "/data/example/packages/first/hardhat.config.ts", - type: "hardhat", - remappings: [], - workspaceFolder: { - name: "example", - uri: "file:///data/example", - }, - }, - "/data/example/packages/second": { - basePath: "/data/example/packages/second", - configPath: "/data/example/packages/second/hardhat.config.ts", - type: "hardhat", - remappings: [], - workspaceFolder: { - name: "example", - uri: "file:///data/example", - }, - }, - }); - }); - - it("should add solidity files", () => { - assert( - "/data/example/packages/first/contracts/A.sol" in - serverState.solFileIndex - ); - assert( - "/data/example/packages/first/contracts/B.sol" in - serverState.solFileIndex - ); - assert( - "/data/example/packages/second/contracts/C.sol" in - serverState.solFileIndex - ); - assert( - "/data/example/packages/second/contracts/D.sol" in - serverState.solFileIndex - ); - }); - - it("should notify the client of indexing starting", () => { - sinon.assert.calledWithExactly( - serverState.connection.sendNotification as any, - "custom/indexing-start", - { - jobId: 1, - path: "", - current: 0, - total: 0, - } - ); - }); - - it("should notify the client of indexing finishing", () => { - sinon.assert.calledWith( - serverState.connection.sendNotification as any, - "custom/indexing-file", - { - jobId: 1, - path: "/data/example/packages/second/contracts/D.sol", - current: 4, - total: 4, - } - ); - }); - }); - - describe("adding multiple workspaces with projects and sol files", () => { - let serverState: IndexWorkspaceFoldersContext; - let addedFolders: WorkspaceFolder[]; - - before(async () => { - addedFolders = [ - { - name: "first", - uri: "file:///data/example/packages/first", - }, - { - name: "second", - uri: "file:///data/example/packages/second", - }, - { - name: "third", - uri: "file:///data/example/packages/third", - }, - ]; - - const mockWorkspaceFileRetriever = setupMockWorkspaceFileRetriever( - { - "/data/example/packages/first": [ - "/data/example/packages/first/hardhat.config.ts", - ], - "/data/example/packages/second": [ - "/data/example/packages/second/hardhat.config.js", - ], - "/data/example/packages/third": [ - "/data/example/packages/third/hardhat.config.ts", - ], - }, - { - "/data/example/packages/first": [ - "/data/example/packages/first/contracts/A.sol", - "/data/example/packages/first/contracts/B.sol", - ], - "/data/example/packages/second": [ - "/data/example/packages/second/contracts/C.sol", - "/data/example/packages/second/contracts/D.sol", - ], - "/data/example/packages/third": [ - "/data/example/packages/third/contracts/E.sol", - "/data/example/packages/third/contracts/F.sol", - ], - } - ); - - serverState = buildServerState({ existingFolders: [] }); - - await indexWorkspaceFolders( - serverState, - mockWorkspaceFileRetriever, - addedFolders - ); - }); - - it("should add multiple workspace folders", () => { - assert.deepStrictEqual(serverState.workspaceFolders, addedFolders); - }); - - it("should add multiple projects", () => { - assert.deepStrictEqual(serverState.projects, { - "/data/example/packages/first": { - basePath: "/data/example/packages/first", - configPath: "/data/example/packages/first/hardhat.config.ts", - type: "hardhat", - remappings: [], - workspaceFolder: { - name: "first", - uri: "file:///data/example/packages/first", - }, - }, - "/data/example/packages/second": { - basePath: "/data/example/packages/second", - configPath: "/data/example/packages/second/hardhat.config.js", - type: "hardhat", - remappings: [], - workspaceFolder: { - name: "second", - uri: "file:///data/example/packages/second", - }, - }, - "/data/example/packages/third": { - basePath: "/data/example/packages/third", - configPath: "/data/example/packages/third/hardhat.config.ts", - type: "hardhat", - remappings: [], - workspaceFolder: { - name: "third", - uri: "file:///data/example/packages/third", - }, - }, - }); - }); - - it("should add solidity files", () => { - assert( - "/data/example/packages/first/contracts/A.sol" in - serverState.solFileIndex - ); - assert( - "/data/example/packages/first/contracts/B.sol" in - serverState.solFileIndex - ); - assert( - "/data/example/packages/second/contracts/C.sol" in - serverState.solFileIndex - ); - assert( - "/data/example/packages/second/contracts/D.sol" in - serverState.solFileIndex - ); - assert( - "/data/example/packages/third/contracts/E.sol" in - serverState.solFileIndex - ); - assert( - "/data/example/packages/third/contracts/F.sol" in - serverState.solFileIndex - ); - }); - - it("should notify the client of indexing starting", () => { - sinon.assert.calledWithExactly( - serverState.connection.sendNotification as any, - "custom/indexing-start", - { - jobId: 1, - path: "", - current: 0, - total: 0, - } - ); - }); - - it("should notify the client of indexing finishing", () => { - sinon.assert.calledWith( - serverState.connection.sendNotification as any, - "custom/indexing-file", - { - jobId: 1, - path: "/data/example/packages/third/contracts/F.sol", - current: 6, - total: 6, - } - ); - }); - }); - - describe("adding single workspace with no projects or sol files", () => { - let serverState: IndexWorkspaceFoldersContext; - let addedFolders: WorkspaceFolder[]; - - before(async () => { - addedFolders = [ - { - name: "example", - uri: "file:///data/example", - }, - ]; - - const mockWorkspaceFileRetriever = setupMockWorkspaceFileRetriever(); - - serverState = buildServerState({ existingFolders: [] }); - - await indexWorkspaceFolders( - serverState, - mockWorkspaceFileRetriever, - addedFolders - ); - }); - - it("should add a new workspace folder ", () => { - assert.deepStrictEqual(serverState.workspaceFolders, addedFolders); - }); - - it("should not add any projects", () => { - assert.deepStrictEqual(serverState.projects, {}); - }); - - it("should not add any solidity files", () => { - assert.deepStrictEqual(serverState.solFileIndex, {}); - }); - - it("should notify the client of indexing starting", () => { - sinon.assert.calledWithExactly( - serverState.connection.sendNotification as any, - "custom/indexing-start", - { - jobId: 1, - path: "", - current: 0, - total: 0, - } - ); - }); - - it("should notify the client of indexing finishing", () => { - sinon.assert.calledWithExactly( - serverState.connection.sendNotification as any, - "custom/indexing-file", - { jobId: 1, path: "", current: 0, total: 0 } - ); - }); - }); - - describe("adding single workspace that has been previously indexed", () => { - let serverState: IndexWorkspaceFoldersContext; - let existingFolders: WorkspaceFolder[]; - let addedFolders: WorkspaceFolder[]; - let mockWorkspaceFileRetriever: WorkspaceFileRetriever; - - before(async () => { - existingFolders = [ - { - name: "example", - uri: "file:///data/example", - }, - ]; - - addedFolders = [ - { - name: "example", - uri: "file:///data/example", - }, - ]; - - mockWorkspaceFileRetriever = setupMockWorkspaceFileRetriever(); - - serverState = buildServerState({ existingFolders }); - - await indexWorkspaceFolders( - serverState, - mockWorkspaceFileRetriever, - addedFolders - ); - }); - - it("should not change the existing folders", () => { - assert.deepStrictEqual(serverState.workspaceFolders, existingFolders); - }); - - it("should not scan for projects or sol files", () => { - sinon.assert.notCalled(mockWorkspaceFileRetriever.findFiles as any); - }); - - it("should notify the client of indexing starting", () => { - sinon.assert.calledWithExactly( - serverState.connection.sendNotification as any, - "custom/indexing-start", - { - jobId: 1, - path: "", - current: 0, - total: 0, - } - ); - }); - - it("should notify the client of indexing finishing", () => { - sinon.assert.calledWithExactly( - serverState.connection.sendNotification as any, - "custom/indexing-file", - { jobId: 1, path: "", current: 0, total: 0 } - ); - }); - }); - - describe("adding two workspaces, one nested within the other", () => { - let serverState: IndexWorkspaceFoldersContext; - let existingFolders: WorkspaceFolder[]; - let addedFolders: WorkspaceFolder[]; - - before(async () => { - existingFolders = []; - - addedFolders = [ - { - name: "example", - uri: "file:///data/example", - }, - { - name: "sub", - uri: "file:///data/example/sub", - }, - ]; - - const mockWorkspaceFileRetriever = setupMockWorkspaceFileRetriever( - { - "/data/example": ["/data/example/hardhat.config.ts"], - }, - { - "/data/example": ["/data/example/contracts/one.sol"], - } - ); - - serverState = buildServerState({ existingFolders }); - - await indexWorkspaceFolders( - serverState, - mockWorkspaceFileRetriever, - addedFolders - ); - }); - - it("should add only the top level workspace", () => { - assert.deepStrictEqual(serverState.workspaceFolders, [ - { - name: "example", - uri: "file:///data/example", - }, - ]); - }); - - it("should add hardhat project", () => { - assert.deepStrictEqual(serverState.projects, { - "/data/example": { - basePath: "/data/example", - configPath: "/data/example/hardhat.config.ts", - type: "hardhat", - remappings: [], - workspaceFolder: { - name: "example", - uri: "file:///data/example", - }, - }, - }); - }); - - it("should notify the client of indexing starting", () => { - sinon.assert.calledWithExactly( - serverState.connection.sendNotification as any, - "custom/indexing-start", - { - jobId: 1, - path: "", - current: 0, - total: 0, - } - ); - }); - - it("should notify the client of indexing finishing", () => { - sinon.assert.calledWith( - serverState.connection.sendNotification as any, - "custom/indexing-file", - { - jobId: 1, - path: "/data/example/contracts/one.sol", - current: 1, - total: 1, - } - ); - }); - }); - - describe("adding a workspace that is nested in already indexed workspaces", () => { - let serverState: IndexWorkspaceFoldersContext; - let existingFolders: WorkspaceFolder[]; - let addedFolders: WorkspaceFolder[]; - - before(async () => { - existingFolders = [ - { - name: "example", - uri: "file:///data/example", - }, - ]; - - addedFolders = [ - { - name: "sub", - uri: "file:///data/example/sub", - }, - ]; - - const mockWorkspaceFileRetriever = setupMockWorkspaceFileRetriever( - { - "/data/example": ["/data/example/hardhat.config.ts"], - }, - { - "/data/example": ["/data/example/contracts/one.sol"], - } - ); - - serverState = buildServerState({ existingFolders }); - - await indexWorkspaceFolders( - serverState, - mockWorkspaceFileRetriever, - addedFolders - ); - }); - - it("should not change the workspace folders", () => { - assert.deepStrictEqual(serverState.workspaceFolders, existingFolders); - }); - - it("should not add any projects", () => { - assert.deepStrictEqual(serverState.projects, {}); - }); - - it("should not add any solidity files", () => { - assert.deepStrictEqual(serverState.solFileIndex, {}); - }); - - it("should notify the client of indexing starting", () => { - sinon.assert.calledWithExactly( - serverState.connection.sendNotification as any, - "custom/indexing-start", - { - jobId: 1, - path: "", - current: 0, - total: 0, - } - ); - }); - - it("should notify the client of indexing finishing", () => { - sinon.assert.calledWith( - serverState.connection.sendNotification as any, - "custom/indexing-file", - { - jobId: 1, - path: "", - current: 0, - total: 0, - } - ); - }); - }); - }); -}); - -function buildServerState({ - existingFolders, -}: { - existingFolders: WorkspaceFolder[]; -}): IndexWorkspaceFoldersContext { - const mockConnection = setupMockConnection(); - const mockLogger = setupMockLogger(); - - return { - indexJobCount: 0, - // eslint-disable-next-line @typescript-eslint/no-explicit-any - connection: mockConnection as any, - solFileIndex: {}, - workspaceFolders: existingFolders, - projects: {}, - logger: mockLogger, - }; -} +// /* eslint-disable @typescript-eslint/no-explicit-any */ +// import { indexWorkspaceFolders } from "@services/initialization/indexWorkspaceFolders"; +// import { assert } from "chai"; +// import * as sinon from "sinon"; +// import { WorkspaceFolder } from "vscode-languageserver"; +// import { ServerState } from "../../../src/types"; +// import { setupMockConnection } from "../../helpers/setupMockConnection"; +// import { setupMockLogger } from "../../helpers/setupMockLogger"; +// import { setupMockWorkspaceFileRetriever } from "../../helpers/setupMockWorkspaceFileRetriever"; + +// describe("initialization", () => { +// describe("indexing workspace folders", () => { +// describe("adding single workspace with projects and sol files", () => { +// let serverState: ServerState; +// let addedFolders: WorkspaceFolder[]; + +// before(async () => { +// addedFolders = [ +// { +// name: "example", +// uri: "file:///data/example", +// }, +// ]; + +// const mockWorkspaceFileRetriever = setupMockWorkspaceFileRetriever( +// { +// "/data/example": ["/data/example/hardhat.config.ts"], +// }, +// { +// "/data/example": ["/data/example/contracts/one.sol"], +// } +// ); + +// serverState = buildServerState({ existingFolders: [] }); + +// await indexWorkspaceFolders( +// serverState, +// mockWorkspaceFileRetriever, +// addedFolders +// ); +// }); + +// it("should add a new workspace folder ", () => { +// assert.deepStrictEqual( +// serverState.indexedWorkspaceFolders, +// addedFolders +// ); +// }); + +// it("should add projects", () => { +// assert.deepStrictEqual(serverState.projects, { +// "/data/example": { +// basePath: "/data/example", +// configPath: "/data/example/hardhat.config.ts", +// type: "hardhat", +// remappings: [], +// workspaceFolder: { +// name: "example", +// uri: "file:///data/example", +// }, +// }, +// }); +// }); + +// it("should add solidity files", () => { +// assert("/data/example/contracts/one.sol" in serverState.solFileIndex); +// }); + +// it("should notify the client of indexing starting", () => { +// sinon.assert.calledWithExactly( +// serverState.connection.sendNotification as any, +// "custom/indexing-start", +// { +// jobId: 1, +// path: "", +// current: 0, +// total: 0, +// } +// ); +// }); + +// it("should notify the client of indexing finishing", () => { +// sinon.assert.calledWithExactly( +// serverState.connection.sendNotification as any, +// "custom/indexing-file", +// { +// jobId: 1, +// path: "/data/example/contracts/one.sol", +// current: 1, +// total: 1, +// } +// ); +// }); +// }); + +// describe("adding single workspace with multiple projects", () => { +// let serverState: ServerState; +// let addedFolders: WorkspaceFolder[]; + +// before(async () => { +// addedFolders = [ +// { +// name: "example", +// uri: "file:///data/example", +// }, +// ]; + +// const mockWorkspaceFileRetriever = setupMockWorkspaceFileRetriever( +// { +// "/data/example": [ +// "/data/example/packages/first/hardhat.config.ts", +// "/data/example/packages/second/hardhat.config.ts", +// ], +// }, +// { +// "/data/example": [ +// "/data/example/packages/first/contracts/A.sol", +// "/data/example/packages/first/contracts/B.sol", +// "/data/example/packages/second/contracts/C.sol", +// "/data/example/packages/second/contracts/D.sol", +// ], +// } +// ); + +// serverState = buildServerState({ existingFolders: [] }); + +// await indexWorkspaceFolders( +// serverState, +// mockWorkspaceFileRetriever, +// addedFolders +// ); +// }); + +// it("should add a new workspace folder ", () => { +// assert.deepStrictEqual( +// serverState.indexedWorkspaceFolders, +// addedFolders +// ); +// }); + +// it("should add multiple projects", () => { +// assert.deepStrictEqual(serverState.projects, { +// "/data/example/packages/first": { +// basePath: "/data/example/packages/first", +// configPath: "/data/example/packages/first/hardhat.config.ts", +// type: "hardhat", +// remappings: [], +// workspaceFolder: { +// name: "example", +// uri: "file:///data/example", +// }, +// }, +// "/data/example/packages/second": { +// basePath: "/data/example/packages/second", +// configPath: "/data/example/packages/second/hardhat.config.ts", +// type: "hardhat", +// remappings: [], +// workspaceFolder: { +// name: "example", +// uri: "file:///data/example", +// }, +// }, +// }); +// }); + +// it("should add solidity files", () => { +// assert( +// "/data/example/packages/first/contracts/A.sol" in +// serverState.solFileIndex +// ); +// assert( +// "/data/example/packages/first/contracts/B.sol" in +// serverState.solFileIndex +// ); +// assert( +// "/data/example/packages/second/contracts/C.sol" in +// serverState.solFileIndex +// ); +// assert( +// "/data/example/packages/second/contracts/D.sol" in +// serverState.solFileIndex +// ); +// }); + +// it("should notify the client of indexing starting", () => { +// sinon.assert.calledWithExactly( +// serverState.connection.sendNotification as any, +// "custom/indexing-start", +// { +// jobId: 1, +// path: "", +// current: 0, +// total: 0, +// } +// ); +// }); + +// it("should notify the client of indexing finishing", () => { +// sinon.assert.calledWith( +// serverState.connection.sendNotification as any, +// "custom/indexing-file", +// { +// jobId: 1, +// path: "/data/example/packages/second/contracts/D.sol", +// current: 4, +// total: 4, +// } +// ); +// }); +// }); + +// describe("adding multiple workspaces with projects and sol files", () => { +// let serverState: ServerState; +// let addedFolders: WorkspaceFolder[]; + +// before(async () => { +// addedFolders = [ +// { +// name: "first", +// uri: "file:///data/example/packages/first", +// }, +// { +// name: "second", +// uri: "file:///data/example/packages/second", +// }, +// { +// name: "third", +// uri: "file:///data/example/packages/third", +// }, +// ]; + +// const mockWorkspaceFileRetriever = setupMockWorkspaceFileRetriever( +// { +// "/data/example/packages/first": [ +// "/data/example/packages/first/hardhat.config.ts", +// ], +// "/data/example/packages/second": [ +// "/data/example/packages/second/hardhat.config.js", +// ], +// "/data/example/packages/third": [ +// "/data/example/packages/third/hardhat.config.ts", +// ], +// }, +// { +// "/data/example/packages/first": [ +// "/data/example/packages/first/contracts/A.sol", +// "/data/example/packages/first/contracts/B.sol", +// ], +// "/data/example/packages/second": [ +// "/data/example/packages/second/contracts/C.sol", +// "/data/example/packages/second/contracts/D.sol", +// ], +// "/data/example/packages/third": [ +// "/data/example/packages/third/contracts/E.sol", +// "/data/example/packages/third/contracts/F.sol", +// ], +// } +// ); + +// serverState = buildServerState({ existingFolders: [] }); + +// await indexWorkspaceFolders( +// serverState, +// mockWorkspaceFileRetriever, +// addedFolders +// ); +// }); + +// it("should add multiple workspace folders", () => { +// assert.deepStrictEqual( +// serverState.indexedWorkspaceFolders, +// addedFolders +// ); +// }); + +// it("should add multiple projects", () => { +// assert.deepStrictEqual(serverState.projects, { +// "/data/example/packages/first": { +// basePath: "/data/example/packages/first", +// configPath: "/data/example/packages/first/hardhat.config.ts", +// type: "hardhat", +// remappings: [], +// workspaceFolder: { +// name: "first", +// uri: "file:///data/example/packages/first", +// }, +// }, +// "/data/example/packages/second": { +// basePath: "/data/example/packages/second", +// configPath: "/data/example/packages/second/hardhat.config.js", +// type: "hardhat", +// remappings: [], +// workspaceFolder: { +// name: "second", +// uri: "file:///data/example/packages/second", +// }, +// }, +// "/data/example/packages/third": { +// basePath: "/data/example/packages/third", +// configPath: "/data/example/packages/third/hardhat.config.ts", +// type: "hardhat", +// remappings: [], +// workspaceFolder: { +// name: "third", +// uri: "file:///data/example/packages/third", +// }, +// }, +// }); +// }); + +// it("should add solidity files", () => { +// assert( +// "/data/example/packages/first/contracts/A.sol" in +// serverState.solFileIndex +// ); +// assert( +// "/data/example/packages/first/contracts/B.sol" in +// serverState.solFileIndex +// ); +// assert( +// "/data/example/packages/second/contracts/C.sol" in +// serverState.solFileIndex +// ); +// assert( +// "/data/example/packages/second/contracts/D.sol" in +// serverState.solFileIndex +// ); +// assert( +// "/data/example/packages/third/contracts/E.sol" in +// serverState.solFileIndex +// ); +// assert( +// "/data/example/packages/third/contracts/F.sol" in +// serverState.solFileIndex +// ); +// }); + +// it("should notify the client of indexing starting", () => { +// sinon.assert.calledWithExactly( +// serverState.connection.sendNotification as any, +// "custom/indexing-start", +// { +// jobId: 1, +// path: "", +// current: 0, +// total: 0, +// } +// ); +// }); + +// it("should notify the client of indexing finishing", () => { +// sinon.assert.calledWith( +// serverState.connection.sendNotification as any, +// "custom/indexing-file", +// { +// jobId: 1, +// path: "/data/example/packages/third/contracts/F.sol", +// current: 6, +// total: 6, +// } +// ); +// }); +// }); + +// describe("adding single workspace with no projects or sol files", () => { +// let serverState: ServerState; +// let addedFolders: WorkspaceFolder[]; + +// before(async () => { +// addedFolders = [ +// { +// name: "example", +// uri: "file:///data/example", +// }, +// ]; + +// const mockWorkspaceFileRetriever = setupMockWorkspaceFileRetriever(); + +// serverState = buildServerState({ existingFolders: [] }); + +// await indexWorkspaceFolders( +// serverState, +// mockWorkspaceFileRetriever, +// addedFolders +// ); +// }); + +// it("should add a new workspace folder ", () => { +// assert.deepStrictEqual( +// serverState.indexedWorkspaceFolders, +// addedFolders +// ); +// }); + +// it("should not add any projects", () => { +// assert.deepStrictEqual(serverState.projects, {}); +// }); + +// it("should not add any solidity files", () => { +// assert.deepStrictEqual(serverState.solFileIndex, {}); +// }); + +// it("should notify the client of indexing starting", () => { +// sinon.assert.calledWithExactly( +// serverState.connection.sendNotification as any, +// "custom/indexing-start", +// { +// jobId: 1, +// path: "", +// current: 0, +// total: 0, +// } +// ); +// }); + +// it("should notify the client of indexing finishing", () => { +// sinon.assert.calledWithExactly( +// serverState.connection.sendNotification as any, +// "custom/indexing-file", +// { jobId: 1, path: "", current: 0, total: 0 } +// ); +// }); +// }); + +// describe("adding single workspace that has been previously indexed", () => { +// let serverState: ServerState; +// let existingFolders: WorkspaceFolder[]; +// let addedFolders: WorkspaceFolder[]; +// let mockWorkspaceFileRetriever: WorkspaceFileRetriever; + +// before(async () => { +// existingFolders = [ +// { +// name: "example", +// uri: "file:///data/example", +// }, +// ]; + +// addedFolders = [ +// { +// name: "example", +// uri: "file:///data/example", +// }, +// ]; + +// mockWorkspaceFileRetriever = setupMockWorkspaceFileRetriever(); + +// serverState = buildServerState({ existingFolders }); + +// await indexWorkspaceFolders( +// serverState, +// mockWorkspaceFileRetriever, +// addedFolders +// ); +// }); + +// it("should not change the existing folders", () => { +// assert.deepStrictEqual( +// serverState.indexedWorkspaceFolders, +// existingFolders +// ); +// }); + +// it("should not scan for projects or sol files", () => { +// sinon.assert.notCalled(mockWorkspaceFileRetriever.findFiles as any); +// }); + +// it("should notify the client of indexing starting", () => { +// sinon.assert.calledWithExactly( +// serverState.connection.sendNotification as any, +// "custom/indexing-start", +// { +// jobId: 1, +// path: "", +// current: 0, +// total: 0, +// } +// ); +// }); + +// it("should notify the client of indexing finishing", () => { +// sinon.assert.calledWithExactly( +// serverState.connection.sendNotification as any, +// "custom/indexing-file", +// { jobId: 1, path: "", current: 0, total: 0 } +// ); +// }); +// }); + +// describe("adding two workspaces, one nested within the other", () => { +// let serverState: ServerState; +// let existingFolders: WorkspaceFolder[]; +// let addedFolders: WorkspaceFolder[]; + +// before(async () => { +// existingFolders = []; + +// addedFolders = [ +// { +// name: "example", +// uri: "file:///data/example", +// }, +// { +// name: "sub", +// uri: "file:///data/example/sub", +// }, +// ]; + +// const mockWorkspaceFileRetriever = setupMockWorkspaceFileRetriever( +// { +// "/data/example": ["/data/example/hardhat.config.ts"], +// }, +// { +// "/data/example": ["/data/example/contracts/one.sol"], +// } +// ); + +// serverState = buildServerState({ existingFolders }); + +// await indexWorkspaceFolders( +// serverState, +// mockWorkspaceFileRetriever, +// addedFolders +// ); +// }); + +// it("should add only the top level workspace", () => { +// assert.deepStrictEqual(serverState.indexedWorkspaceFolders, [ +// { +// name: "example", +// uri: "file:///data/example", +// }, +// ]); +// }); + +// it("should add hardhat project", () => { +// assert.deepStrictEqual(serverState.projects, { +// "/data/example": { +// basePath: "/data/example", +// configPath: "/data/example/hardhat.config.ts", +// remappings: [], +// workspaceFolder: { +// name: "example", +// uri: "file:///data/example", +// }, +// }, +// }); +// }); + +// it("should notify the client of indexing starting", () => { +// sinon.assert.calledWithExactly( +// serverState.connection.sendNotification as any, +// "custom/indexing-start", +// { +// jobId: 1, +// path: "", +// current: 0, +// total: 0, +// } +// ); +// }); + +// it("should notify the client of indexing finishing", () => { +// sinon.assert.calledWith( +// serverState.connection.sendNotification as any, +// "custom/indexing-file", +// { +// jobId: 1, +// path: "/data/example/contracts/one.sol", +// current: 1, +// total: 1, +// } +// ); +// }); +// }); + +// describe("adding a workspace that is nested in already indexed workspaces", () => { +// let serverState: ServerState; +// let existingFolders: WorkspaceFolder[]; +// let addedFolders: WorkspaceFolder[]; + +// before(async () => { +// existingFolders = [ +// { +// name: "example", +// uri: "file:///data/example", +// }, +// ]; + +// addedFolders = [ +// { +// name: "sub", +// uri: "file:///data/example/sub", +// }, +// ]; + +// const mockWorkspaceFileRetriever = setupMockWorkspaceFileRetriever( +// { +// "/data/example": ["/data/example/hardhat.config.ts"], +// }, +// { +// "/data/example": ["/data/example/contracts/one.sol"], +// } +// ); + +// serverState = buildServerState({ existingFolders }); + +// await indexWorkspaceFolders( +// serverState, +// mockWorkspaceFileRetriever, +// addedFolders +// ); +// }); + +// it("should not change the workspace folders", () => { +// assert.deepStrictEqual( +// serverState.indexedWorkspaceFolders, +// existingFolders +// ); +// }); + +// it("should not add any projects", () => { +// assert.deepStrictEqual(serverState.projects, {}); +// }); + +// it("should not add any solidity files", () => { +// assert.deepStrictEqual(serverState.solFileIndex, {}); +// }); + +// it("should notify the client of indexing starting", () => { +// sinon.assert.calledWithExactly( +// serverState.connection.sendNotification as any, +// "custom/indexing-start", +// { +// jobId: 1, +// path: "", +// current: 0, +// total: 0, +// } +// ); +// }); + +// it("should notify the client of indexing finishing", () => { +// sinon.assert.calledWith( +// serverState.connection.sendNotification as any, +// "custom/indexing-file", +// { +// jobId: 1, +// path: "", +// current: 0, +// total: 0, +// } +// ); +// }); +// }); +// }); +// }); + +// function buildServerState({ +// existingFolders, +// }: { +// existingFolders: WorkspaceFolder[]; +// }): ServerState { +// const mockConnection = setupMockConnection(); +// const mockLogger = setupMockLogger(); + +// return { +// indexJobCount: 0, +// // eslint-disable-next-line @typescript-eslint/no-explicit-any +// connection: mockConnection as any, +// solFileIndex: {}, +// indexedWorkspaceFolders: existingFolders, +// projects: {}, +// logger: mockLogger, +// } as unknown as ServerState; +// } diff --git a/server/test/services/navigation/definition.ts b/server/test/services/navigation/definition.ts index 0e9bdf5f..9b4ca11f 100644 --- a/server/test/services/navigation/definition.ts +++ b/server/test/services/navigation/definition.ts @@ -226,7 +226,6 @@ describe("Parser", () => { documents: [ { uri: parentUri, analyze: true }, { uri: childUri, analyze: true }, - { uri: parentUri, analyze: true }, ], errors: [], })); diff --git a/server/test/services/validation/convertHardhatErrorToDiagnostic.ts b/server/test/services/validation/convertHardhatErrorToDiagnostic.ts deleted file mode 100644 index 337b7a6b..00000000 --- a/server/test/services/validation/convertHardhatErrorToDiagnostic.ts +++ /dev/null @@ -1,344 +0,0 @@ -import { - DiagnosticSeverity, - TextDocument, - Diagnostic, - Range, -} from "@common/types"; -import { - convertHardhatErrorToDiagnostic, - IMPORT_FILE_ERROR_CODES, - IMPORT_LIBRARY_ERROR_CODES, -} from "@services/validation/convertHardhatErrorToDiagnostic"; -import { assert } from "chai"; - -interface ErrorDescription { - number: number; - message: string; - title: string; - description: string; - shouldBeReported: false; -} - -describe("validation", () => { - describe("convertHardhatErrorToDiagnostic", () => { - const exampleUri = "/example"; - - describe("404 - Imported file not found", () => { - it("should convert to a diagnostic", () => { - assertConversionToDiagnostic( - "./nonexistant.sol", - { - number: 404, - message: "File %imported%, imported from %from%, not found.", - title: "Imported file not found", - description: `One of your source files imported a nonexistent file. - Please double check your imports.`, - shouldBeReported: false, - }, - { - message: "Imported file not found", - range: { - start: { line: 0, character: 8 }, - end: { line: 0, character: 25 }, - }, - } - ); - }); - }); - - describe("405 - Invalid import: use / instead of \\", () => { - it("should convert to a diagnostic", () => { - assertConversionToDiagnostic( - ".\\access\\Auth.sol", - { - number: 405, - message: - "Invalid import %imported% from %from%. Imports must use / instead of \\, even in Windows", - title: "Invalid import: use / instead of \\", - description: `A Solidity file is trying to import another file via relative path and is using backslashes (\\\\) instead of slashes (/). - -You must always use slashes (/) in Solidity imports.`, - shouldBeReported: false, - }, - { - message: "Invalid import: use / instead of \\", - range: { - start: { line: 0, character: 8 }, - end: { line: 0, character: 25 }, - }, - } - ); - }); - }); - - describe("406 - trying to import a file using an unsupported protocol", () => { - it("should convert to a diagnostic", () => { - const errorDescription = `A Solidity file is trying to import a file using an unsupported protocol, like http. - -You can only import files that are available locally or installed through npm.`; - - assertConversionToDiagnostic( - "ipfs://abbiji", - { - number: 406, - message: - "Invalid import %imported% from %from%. Hardhat doesn't support imports via %protocol%.", - title: "Invalid import: trying to use an unsupported protocol", - description: errorDescription, - shouldBeReported: false, - }, - { - message: "Invalid import: trying to use an unsupported protocol", - range: { - start: { line: 0, character: 8 }, - end: { line: 0, character: 21 }, - }, - } - ); - }); - }); - - describe("407 - Invalid import: absolute paths unsupported", () => { - it("should convert to a diagnostic", () => { - assertConversionToDiagnostic( - "/Users/example/file.sol", - { - number: 407, - message: - "Invalid import %imported% from %from%. Hardhat doesn't support imports with absolute paths.", - title: "Invalid import: absolute paths unsupported", - description: `A Solidity file is trying to import a file using its absolute path. - -This is not supported, as it would lead to hard-to-reproduce compilations.`, - shouldBeReported: false, - }, - { - message: "Invalid import: absolute paths unsupported", - range: { - start: { line: 0, character: 8 }, - end: { line: 0, character: 31 }, - }, - } - ); - }); - }); - - describe("408 - Illegal Solidity import", () => { - it("should convert to a diagnostic", () => { - const errorDescription = `A Solidity file is trying to import a file that is outside of the project. - -This is not supported by Hardhat.`; - - assertConversionToDiagnostic( - "../../../../outside.sol", - { - number: 408, - message: - "Invalid import %imported% from %from%. The file being imported is outside of the project", - title: "Invalid import: file outside of the project", - description: errorDescription, - shouldBeReported: false, - }, - { - message: "Invalid import: file outside of the project", - range: { - start: { line: 0, character: 8 }, - end: { line: 0, character: 31 }, - }, - } - ); - }); - }); - - describe("409 - Invalid import: wrong file casing", () => { - it("should convert to a diagnostic", () => { - const errorDescription = `A Solidity file is trying to import a file but its source name casing was wrong. - -Hardhat's compiler is case sensitive to ensure projects are portable across different operating systems.`; - - assertConversionToDiagnostic( - "./WRONGCASE.sol", - { - number: 409, - message: - "Trying to import %imported% from %from%, but it has an incorrect casing.", - title: "Invalid import: wrong file casing", - description: errorDescription, - shouldBeReported: false, - }, - { - message: "Invalid import: wrong file casing", - range: { - start: { line: 0, character: 8 }, - end: { line: 0, character: 23 }, - }, - } - ); - }); - }); - - describe("411 - Invalid import: missing library", () => { - it("should convert to a diagnostic", () => { - const errorDescription = - "A Solidity file is trying to import another which belongs to a library that is not installed.\n \nTry installing the library using npm."; - - assertConversionToDiagnostic( - "@foo/Bar.sol", - { - number: 411, - message: - "The library %library%, imported from %from%, is not installed. Try installing it using npm.", - title: "Invalid import: library not installed", - description: errorDescription, - shouldBeReported: false, - }, - { - message: "Invalid import: library not installed", - range: { - start: { line: 0, character: 8 }, - end: { line: 0, character: 20 }, - }, - } - ); - }); - }); - - describe("unhandled - an unknown hardhat error", () => { - const fileText = ` -//SPDX-License-Identifier: Unlicense -pragma solidity ^0.8.0; -import "1123453";`; - - const unknownErrorDescription = "Unknown text about the error"; - - let diagnostic: Diagnostic | null; - - beforeEach(() => { - const document = TextDocument.create( - exampleUri, - "solidity", - 0, - fileText - ); - - diagnostic = convertHardhatErrorToDiagnostic(document, { - name: "HardhatError", - errorDescriptor: { - number: 999, - title: "unknown - some unknown error", - description: unknownErrorDescription, - message: "an unknown error", - shouldBeReported: false, - }, - }); - }); - - it("should not convert to a diagnostic", () => { - assert.deepStrictEqual(diagnostic, null); - }); - }); - - describe("badly formed hardhat error", () => { - const fileText = ` -//SPDX-License-Identifier: Unlicense`; - - let diagnostic: Diagnostic | null; - - beforeEach(() => { - const document = TextDocument.create( - exampleUri, - "solidity", - 0, - fileText - ); - - diagnostic = convertHardhatErrorToDiagnostic(document, { - name: "HardhatError", - // eslint-disable-next-line @typescript-eslint/no-explicit-any - errorDescriptor: undefined as any, - }); - }); - - it("should not convert to a diagnostic", () => { - assert.deepStrictEqual(diagnostic, null); - }); - }); - - describe("unmatchable error to file text", () => { - const fileText = ` -//SPDX-License-Identifier: Unlicense -pragma solidity ^0.8.0; -import "existant.sol";`; - - let diagnostic: Diagnostic | null; - - beforeEach(() => { - const document = TextDocument.create( - exampleUri, - "solidity", - 0, - fileText - ); - - diagnostic = convertHardhatErrorToDiagnostic(document, { - name: "HardhatError", - errorDescriptor: { - number: 404, - message: "File {x} not found", - title: "Imported file not found", - description: `One of your source files imported a nonexistent file. - Please double check your imports.`, - shouldBeReported: false, - }, - messageArguments: { imported: "nonexistant.sol" }, - }); - }); - - it("should not convert to a diagnostic", () => { - assert.deepStrictEqual(diagnostic, null); - }); - }); - }); -}); - -function assertConversionToDiagnostic( - importLine: string, - errorDescription: ErrorDescription, - expected: { - message: string; - range: Range; - } -) { - const exampleUri = "/example"; - - const fileText = `import "${importLine}";`; - - const document = TextDocument.create(exampleUri, "solidity", 0, fileText); - - let messageArguments = {}; - if (IMPORT_FILE_ERROR_CODES.includes(errorDescription.number)) { - messageArguments = { imported: importLine }; - } else if (IMPORT_LIBRARY_ERROR_CODES.includes(errorDescription.number)) { - messageArguments = { library: importLine }; - } - - const diagnostic: Diagnostic | null = convertHardhatErrorToDiagnostic( - document, - { - name: "HardhatError", - errorDescriptor: errorDescription, - messageArguments, - } - ); - - if (diagnostic === null) { - assert.fail("No diagnostic returned"); - } - - assert.deepStrictEqual(diagnostic, { - severity: DiagnosticSeverity.Error, - code: errorDescription.number, - source: "hardhat", - ...expected, - }); -} diff --git a/server/test/services/validation/hardhatWorker.ts b/server/test/services/validation/hardhatWorker.ts deleted file mode 100644 index 236a9c26..00000000 --- a/server/test/services/validation/hardhatWorker.ts +++ /dev/null @@ -1,157 +0,0 @@ -/* eslint-disable @typescript-eslint/no-explicit-any */ -import { HardhatProject } from "@analyzer/HardhatProject"; -import { HardhatWorker } from "@services/validation/HardhatWorker"; -import { assert } from "chai"; -import * as sinon from "sinon"; -import { Connection } from "vscode-languageserver"; -import { setupMockConnection } from "../../helpers/setupMockConnection"; -import { setupMockLogger } from "../../helpers/setupMockLogger"; - -describe("Hardhat Worker", () => { - const exampleProj: HardhatProject = { - type: "hardhat", - basePath: "/example", - configPath: "/example/hardhat.config.js", - remappings: [], - workspaceFolder: { - name: "example", - uri: "/example", - }, - }; - const mockConnection = setupMockConnection(); - let mockChildProcess: any; - const processFactory = () => { - return mockChildProcess; - }; - const mockLogger = setupMockLogger(); - let hardhatWorker: HardhatWorker; - - beforeEach(() => { - // Instantiate mocks before each testcase - mockChildProcess = { - callbacks: {}, - kill: sinon.spy(), - on(event: string, callback: () => {}) { - this.callbacks[event] = callback; - }, - }; - hardhatWorker = new HardhatWorker( - exampleProj, - processFactory, - mockLogger, - mockConnection as unknown as Connection - ); - }); - - describe("initialization", () => { - it("should set the worker to STARTING", () => { - hardhatWorker.init(); - - assert.equal(hardhatWorker.status, "STARTING"); - }); - - describe("when already starting", () => { - it("should error", () => { - hardhatWorker.status = "STARTING"; - - assert.throws( - () => hardhatWorker.init(), - "Cannot start a worker thread that has already started" - ); - }); - }); - - describe("when already running", () => { - it("should error", () => { - hardhatWorker.status = "RUNNING"; - - assert.throws( - () => hardhatWorker.init(), - "Cannot start a worker thread that has already started" - ); - }); - }); - - describe("on child's initialization complete", function () { - it("sends a custom notification", async () => { - hardhatWorker.init(); - const onMessageCallback = mockChildProcess.callbacks.message; - onMessageCallback({ type: "INITIALISATION_COMPLETE" }); - sinon.assert.calledWith( - mockConnection.sendNotification, - "custom/worker-initialized", - { projectBasePath: exampleProj.basePath } - ); - }); - }); - }); - - describe("on exit", () => { - let openJob: any; - - beforeEach(() => { - openJob = { - resolve: sinon.spy(), - reject: sinon.spy(), - }; - - hardhatWorker.jobs.example = openJob; - - hardhatWorker.init(); - }); - - describe("when running", () => { - beforeEach(() => { - hardhatWorker.status = "RUNNING"; - - hardhatWorker.handleExit(1, null); - }); - - it("should cancel any open jobs", () => { - assert.lengthOf(Object.values(hardhatWorker.jobs), 0); - sinon.assert.called(openJob.reject); - }); - - it("should restart", () => { - sinon.assert.called(mockChildProcess.kill); - }); - - it("should set the worker to STARTING", () => { - assert.equal(hardhatWorker.status, "STARTING"); - }); - }); - - describe("when starting", () => { - beforeEach(() => { - hardhatWorker.handleExit(1, null); - }); - - it("should not restart", () => { - sinon.assert.notCalled(mockChildProcess.kill); - }); - - it("should set the worker to INITIALIZATION_ERRORED", () => { - assert.equal(hardhatWorker.status, "INITIALIZATION_ERRORED"); - }); - }); - - describe("termination through signal", () => { - beforeEach(() => { - hardhatWorker.handleExit(1, "SIGTERM"); - }); - - it("should cancel any open jobs", () => { - assert.lengthOf(Object.values(hardhatWorker.jobs), 0); - sinon.assert.called(openJob.reject); - }); - - it("should not restart", () => { - sinon.assert.notCalled(mockChildProcess.kill); - }); - - it("should set the worker back to UNINITIALIZED", () => { - assert.equal(hardhatWorker.status, "UNINITIALIZED"); - }); - }); - }); -}); diff --git a/server/test/services/validation/validate.test.ts b/server/test/services/validation/validate.test.ts new file mode 100644 index 00000000..062ae5e4 --- /dev/null +++ b/server/test/services/validation/validate.test.ts @@ -0,0 +1,30 @@ +// import { assert } from "chai"; +// import { TextDocumentChangeEvent } from "vscode-languageserver"; +// import { TextDocument } from "vscode-languageserver-textdocument"; +// import { validate } from "../../../src/services/validation/validate"; +// import { ServerState } from "../../../src/types"; +// import { setupMockTelemetry } from "../../helpers/setupMockTelemetry"; + +// let serverState: ServerState; +// let change: TextDocumentChangeEvent; + +// beforeEach(async () => { +// serverState = { +// telemetry: setupMockTelemetry(), +// indexingFinished: true, +// } as unknown as ServerState; + +// change = { +// document: { +// uri: "foo.ts", +// }, +// } as any; +// }); + +// describe.only("validate", function () { +// it("returns if indexing stage is not finished", async () => { +// serverState.indexingFinished = false; +// const result = await validate(serverState, change); +// assert.isFalse(result); +// }); +// }); diff --git a/server/test/services/validation/validation.ts b/server/test/services/validation/validation.ts deleted file mode 100644 index 9f9a05eb..00000000 --- a/server/test/services/validation/validation.ts +++ /dev/null @@ -1,1202 +0,0 @@ -import { validate } from "@services/validation/validate"; -import { Logger } from "@utils/Logger"; -import { assert } from "chai"; -import * as path from "path"; -import sinon from "sinon"; -import { TextDocument } from "vscode-languageserver-textdocument"; -import { - CancelledValidation, - HardhatCompilerError, - HardhatThrownError, - JobCompletionError, - UnknownError, - ValidationCompleteMessage, - ValidationJobStatusNotification, - ValidatorError, -} from "../../../src/types"; -import { forceToUnixStyle } from "../../helpers/forceToUnixStyle"; -import { prependWithSlash } from "../../helpers/prependWithSlash"; -import { setupMockConnection } from "../../helpers/setupMockConnection"; -import { setupMockLanguageServer } from "../../helpers/setupMockLanguageServer"; -import { setupMockLogger } from "../../helpers/setupMockLogger"; -import { waitUntil } from "../../helpers/waitUntil"; -import { setupMockTelemetry } from "../../helpers/setupMockTelemetry"; -import { runningOnWindows } from "../../../src/utils/operatingSystem"; - -describe("Parser", () => { - describe("Validation", function () { - const workspaceFolder = prependWithSlash( - forceToUnixStyle(path.join(__dirname, "../..")) - ); - - const projectUri = forceToUnixStyle( - path.join(__dirname, "hardhat.config.js") - ); - - const basicUri = forceToUnixStyle( - path.join(__dirname, "testData", "Basic.sol") - ); - - const blockedUri = forceToUnixStyle( - path.join(__dirname, "testData", "Blocked.sol") - ); - - let mockConnection: ReturnType; - - describe("validation fail - solc warnings/errors from worker", () => { - describe("pass through", () => { - const exampleError: HardhatCompilerError = { - sourceLocation: { - file: basicUri, - start: 5, - end: 15, - }, - errorCode: "101", - severity: "error", - message: "It went wrong!", - formattedMessage: "-", - type: "DeclarationError", - component: "general", - }; - - beforeEach(async () => { - ({ connection: mockConnection } = await setupMockLanguageServer({ - projects: { [workspaceFolder]: [projectUri] }, - documents: [{ uri: basicUri, analyze: true }], - errors: [exampleError], - })); - - try { - await waitUntil( - () => mockConnection.sendDiagnostics.calledOnce, - 100, - 1000 - ); - } catch { - assert.fail("Send diagnostics not called"); - } - }); - - it("should convert error to diagnostic", async () => { - assert(mockConnection.sendDiagnostics.calledOnce); - const { uri, diagnostics } = - mockConnection.sendDiagnostics.firstCall.firstArg; - - assert.equal(uri, basicUri); - assert.deepStrictEqual(diagnostics, [ - { - code: "101", - message: "It went wrong!", - severity: 1, - source: "solidity", - range: { - start: { - character: 5, - line: 0, - }, - end: { - character: 15, - line: 0, - }, - }, - }, - ]); - }); - }); - - describe("enhancement", () => { - describe.skip("function level error/warning", () => { - const mutabliltyRestrictToViewWarning: HardhatCompilerError = { - component: "general", - errorCode: "2018", - formattedMessage: - "Warning: Function state mutability can be restricted to view\n --> contracts/Greeter.sol:14:2:\n |\n14 | \tfunction greet() public returns (string memory) {\n | \t^ (Relevant source part starts here and spans across multiple lines).\n\n", - message: "Function state mutability can be restricted to view", - severity: "warning", - sourceLocation: { - file: basicUri, - start: 445, - end: 556, - }, - type: "DeclarationError", - }; - - beforeEach(async () => { - ({ connection: mockConnection } = await setupMockLanguageServer({ - projects: { [workspaceFolder]: [projectUri] }, - documents: [{ uri: basicUri, analyze: true }], - errors: [mutabliltyRestrictToViewWarning], - })); - - try { - await waitUntil( - () => mockConnection.sendDiagnostics.calledOnce, - 100, - 1000 - ); - } catch { - assert.fail("Send diagnostics not called"); - } - }); - - it("should convert constrain range of mutability warning", async () => { - assert(mockConnection.sendDiagnostics.calledOnce); - - const { uri, diagnostics } = - mockConnection.sendDiagnostics.firstCall.firstArg; - - assert.equal(uri, basicUri); - assert.deepStrictEqual(diagnostics, [ - { - code: "2018", - message: "Function state mutability can be restricted to view", - severity: 2, - source: "solidity", - range: { - start: { - line: 24, - character: 11, - }, - end: { - line: 24, - character: 16, - }, - }, - data: { - functionSourceLocation: { - start: 445, - end: 556, - }, - }, - }, - ]); - }); - }); - - describe("contract level error/warning", () => { - describe('3656 - Contract "Counter" should be marked as abstract', () => { - const interfacesUri = forceToUnixStyle( - path.join(__dirname, "testData", "Interfaces.sol") - ); - - const markAsAbstractError: HardhatCompilerError = { - component: "general", - errorCode: "3656", - formattedMessage: "", - message: 'Contract "Counter" should be marked as abstract.', - severity: "error", - sourceLocation: { - file: interfacesUri, - start: 131, - end: 162, - }, - type: "DeclarationError", - }; - - beforeEach(async () => { - ({ connection: mockConnection } = await setupMockLanguageServer({ - projects: { [workspaceFolder]: [projectUri] }, - documents: [{ uri: interfacesUri, analyze: true }], - errors: [markAsAbstractError], - })); - - try { - await waitUntil( - () => mockConnection.sendDiagnostics.calledOnce, - 100, - 1000 - ); - } catch { - assert.fail("Send diagnostics not called"); - } - }); - - it("should convert constrain range of mark as abstract error", async () => { - assert(mockConnection.sendDiagnostics.calledOnce); - const { uri, diagnostics } = - mockConnection.sendDiagnostics.firstCall.firstArg; - - assert.equal(uri, interfacesUri); - assert.deepStrictEqual(diagnostics, [ - { - code: "3656", - message: 'Contract "Counter" should be marked as abstract.', - severity: 1, - source: "solidity", - range: { - start: { - line: 7, - character: 9, - }, - end: { - line: 7, - character: 16, - }, - }, - data: { - functionSourceLocation: { - start: 131, - end: 162, - }, - }, - }, - ]); - }); - }); - - describe("5574 - Contract Size", () => { - const contractCodeSizeUri = forceToUnixStyle( - path.join(__dirname, "testData", "ContractCodeSize.sol") - ); - - const contractSizeError: HardhatCompilerError = { - component: "general", - errorCode: "5574", - formattedMessage: "", - message: - 'Contract code size exceeds 24576 bytes (a limit introduced in Spurious Dragon). This contract may not be deployable on mainnet. Consider enabling the optimizer (with a low "runs" value!), turning off revert strings, or using libraries.', - severity: "warning", - sourceLocation: { - file: contractCodeSizeUri, - start: 62, - end: 249, - }, - type: "DeclarationError", - }; - - beforeEach(async () => { - ({ connection: mockConnection } = await setupMockLanguageServer({ - projects: { [workspaceFolder]: [projectUri] }, - documents: [{ uri: contractCodeSizeUri, analyze: true }], - errors: [contractSizeError], - })); - - try { - await waitUntil( - () => mockConnection.sendDiagnostics.calledOnce, - 100, - 1000 - ); - } catch { - assert.fail("Send diagnostics not called"); - } - }); - - it("should convert constrain range of mark as abstract error", async () => { - assert(mockConnection.sendDiagnostics.calledOnce); - const { uri, diagnostics } = - mockConnection.sendDiagnostics.firstCall.firstArg; - - assert.equal(uri, contractCodeSizeUri); - assert.deepStrictEqual(diagnostics, [ - { - code: "5574", - message: - 'Contract code size exceeds 24576 bytes (a limit introduced in Spurious Dragon). This contract may not be deployable on mainnet. Consider enabling the optimizer (with a low "runs" value!), turning off revert strings, or using libraries.', - severity: 2, - source: "solidity", - range: { - start: { - line: 3, - character: 9, - }, - end: { - line: 3, - character: 14, - }, - }, - data: { - functionSourceLocation: { - start: 62, - end: 249, - }, - }, - }, - ]); - }); - }); - }); - }); - - describe("blocking", () => { - const addOverrideErrorFoo: HardhatCompilerError = { - component: "general", - errorCode: "9456", - formattedMessage: "Error: ...", - message: 'Overriding function is missing "override" specifier.', - severity: "error", - sourceLocation: { - file: blockedUri, - start: 248, - end: 272, - }, - type: "DeclarationError", - }; - - const addMultioverrideErrorFoo: HardhatCompilerError = { - component: "general", - errorCode: "4327", - formattedMessage: "Error: ...", - message: - 'Function needs to specify overridden contracts "Alpha" and "Gamma".', - severity: "error", - sourceLocation: { - file: blockedUri, - start: 248, - end: 272, - }, - type: "DeclarationError", - }; - - const addOverrideErrorBar: HardhatCompilerError = { - component: "general", - errorCode: "9456", - formattedMessage: "Error: ...", - message: 'Overriding function is missing "override" specifier.', - severity: "error", - sourceLocation: { - file: blockedUri, - start: 276, - end: 300, - }, - type: "DeclarationError", - }; - - beforeEach(async () => { - ({ connection: mockConnection } = await setupMockLanguageServer({ - projects: { [workspaceFolder]: [projectUri] }, - documents: [{ uri: blockedUri, analyze: true }], - errors: [ - addOverrideErrorFoo, - addMultioverrideErrorFoo, - addOverrideErrorBar, - ], - })); - - try { - await waitUntil( - () => mockConnection.sendDiagnostics.calledOnce, - 100, - 1000 - ); - } catch { - assert.fail("Send diagnostics not called"); - } - }); - - it("should remove diagnostics blocked by more important diagnostics", async () => { - assert(mockConnection.sendDiagnostics.calledOnce); - const { uri, diagnostics } = - mockConnection.sendDiagnostics.firstCall.firstArg; - - assert.equal(uri, blockedUri); - assert.deepStrictEqual(diagnostics, [ - // only the multi-override survives on foo - { - code: "4327", - message: - 'Function needs to specify overridden contracts "Alpha" and "Gamma".', - severity: 1, - source: "solidity", - range: { - start: { - line: 14, - character: 11, - }, - end: { - line: 14, - character: 14, - }, - }, - data: { - functionSourceLocation: { - start: 248, - end: 272, - }, - }, - }, - // only the single override on bar is unaffected - { - code: "9456", - message: 'Overriding function is missing "override" specifier.', - severity: 1, - source: "solidity", - range: { - start: { - line: 16, - character: 11, - }, - end: { - line: 16, - character: 14, - }, - }, - data: { - functionSourceLocation: { - start: 276, - end: 300, - }, - }, - }, - ]); - }); - }); - - describe("warnings/errors in a dependency but none in open editor", () => { - const exampleError: HardhatCompilerError = { - sourceLocation: { - file: basicUri, - start: 5, - end: 15, - }, - errorCode: "101", - severity: "error", - message: "It went wrong!", - formattedMessage: "-", - type: "DeclarationError", - component: "general", - }; - - let sendDiagnostics: sinon.SinonSpy; - let sendNotification: sinon.SinonSpy; - - before(async () => { - sendDiagnostics = sinon.spy(); - sendNotification = sinon.spy(); - const logger = setupMockLogger(); - - const workerReturnMessage: ValidationCompleteMessage = { - type: "VALIDATION_COMPLETE", - status: "VALIDATION_FAIL", - jobId: 1, - version: "0.8.0", - projectBasePath: "/projects/example", - errors: [exampleError], - }; - - await validateReturningWorkerMessage(workerReturnMessage, { - sendDiagnosticsSpy: sendDiagnostics, - sendNotificationSpy: sendNotification, - mockLogger: logger, - }); - }); - - it("should clear diagnostics", async () => { - assert(sendDiagnostics.called); - assert.deepStrictEqual(sendDiagnostics.args[0][0], { - diagnostics: [], - uri: "/projects/example/contracts/first.sol", - }); - }); - - it("should indicate success for the solidity status", () => { - assert(sendNotification.called); - assert.equal( - sendNotification.args[0][0], - "custom/validation-job-status" - ); - assert.deepStrictEqual(sendNotification.args[0][1], { - validationRun: true, - projectBasePath: "/projects/example", - version: "0.8.0", - }); - }); - }); - }); - - describe("validation pass - no solc warnings/errors from worker", () => { - let sendDiagnostics: sinon.SinonSpy; - let sendNotification: sinon.SinonSpy; - - before(async () => { - sendDiagnostics = sinon.spy(); - sendNotification = sinon.spy(); - const logger = setupMockLogger(); - - const workerReturnMessage: ValidationCompleteMessage = { - type: "VALIDATION_COMPLETE", - status: "VALIDATION_PASS", - jobId: 1, - version: "0.8.0", - projectBasePath: "/projects/example", - sources: [ - runningOnWindows() - ? "c:/projects/example/contracts/first.sol" - : "/projects/example/contracts/first.sol", - ], - }; - - await validateReturningWorkerMessage(workerReturnMessage, { - sendDiagnosticsSpy: sendDiagnostics, - sendNotificationSpy: sendNotification, - mockLogger: logger, - }); - }); - - it("should clear diagnostics", async () => { - assert(sendDiagnostics.called); - assert.deepStrictEqual(sendDiagnostics.args[0][0], { - diagnostics: [], - uri: runningOnWindows() - ? "/c:/projects/example/contracts/first.sol" - : "/projects/example/contracts/first.sol", - }); - }); - - it("should indicate success for the solidity status", () => { - assert(sendNotification.called); - assert.equal( - sendNotification.args[0][0], - "custom/validation-job-status" - ); - assert.deepStrictEqual(sendNotification.args[0][1], { - validationRun: true, - projectBasePath: "/projects/example", - version: "0.8.0", - }); - }); - }); - - describe("validation errored - process failed within worker", () => { - describe("hardhat error", () => { - describe("import line error", () => { - let sendDiagnostics: sinon.SinonSpy; - let sendNotification: sinon.SinonSpy; - let logger: Logger; - - before(async () => { - sendDiagnostics = sinon.spy(); - sendNotification = sinon.spy(); - logger = setupMockLogger(); - - const workerReturnMessage: HardhatThrownError = { - type: "VALIDATION_COMPLETE", - status: "HARDHAT_ERROR", - jobId: 1, - projectBasePath: "/projects/example", - hardhatError: { - name: "HardhatError", - errorDescriptor: { - number: 406, - message: - "Invalid import %imported% from %from%. Hardhat doesn't support imports via %protocol%.", - title: - "Invalid import: trying to use an unsupported protocol", - description: "A Solidity file is trying to import...", - shouldBeReported: false, - }, - messageArguments: { imported: "./nonexistant.sol" }, - }, - }; - - await validateReturningWorkerMessage(workerReturnMessage, { - sendDiagnosticsSpy: sendDiagnostics, - sendNotificationSpy: sendNotification, - mockLogger: logger, - }); - }); - - it("should send the import line diagnostic", async () => { - assert(sendDiagnostics.called); - assert.deepStrictEqual(sendDiagnostics.args[0][0], { - diagnostics: [ - { - source: "hardhat", - code: 406, - severity: 1, - message: - "Invalid import: trying to use an unsupported protocol", - range: { - start: { - line: 1, - character: 9, - }, - end: { - line: 1, - character: 26, - }, - }, - }, - ], - uri: "/projects/example/contracts/first.sol", - }); - }); - - it("should indicate failure for the solidity status", () => { - assert(sendNotification.called); - assert.equal( - sendNotification.args[0][0], - "custom/validation-job-status" - ); - - const expectedFailureStatus: ValidationJobStatusNotification = { - validationRun: false, - projectBasePath: "/projects/example", - reason: "import line hardhat error", - displayText: "import error", - }; - - assert.deepStrictEqual( - sendNotification.args[0][1], - expectedFailureStatus - ); - }); - }); - - describe("non-import line error", () => { - let sendDiagnostics: sinon.SinonSpy; - let sendNotification: sinon.SinonSpy; - let logger: Logger; - const projectBasePath = runningOnWindows() - ? "c:/projects/example" - : "/projects/example"; - const errorFile = runningOnWindows() - ? "/c:/projects/example/importing.sol" - : "/projects/example/importing.sol"; - - before(async () => { - sendDiagnostics = sinon.spy(); - sendNotification = sinon.spy(); - logger = setupMockLogger(); - - const workerReturnMessage: HardhatThrownError = { - type: "VALIDATION_COMPLETE", - status: "HARDHAT_ERROR", - jobId: 1, - projectBasePath, - hardhatError: { - name: "HardhatError", - errorDescriptor: { - number: 404, - message: "This is an example errror", - title: "Example error", - description: "This is an example error", - shouldBeReported: false, - }, - messageArguments: { - from: "importing.sol", - imported: "nonexistent.sol", - }, - }, - }; - - await validateReturningWorkerMessage(workerReturnMessage, { - sendDiagnosticsSpy: sendDiagnostics, - sendNotificationSpy: sendNotification, - mockLogger: logger, - }); - }); - - it("should clear diagnostics", async () => { - assert(sendDiagnostics.called); - assert.deepStrictEqual(sendDiagnostics.args[0][0], { - diagnostics: [], - uri: "/projects/example/contracts/first.sol", - }); - }); - - it("should log the error for triage", async () => { - // eslint-disable-next-line @typescript-eslint/no-explicit-any - assert((logger.error as any).called); - }); - - it("should indicate failure for the solidity status", () => { - assert(sendNotification.called); - assert.equal( - sendNotification.args[0][0], - "custom/validation-job-status" - ); - - const expectedFailureStatus: ValidationJobStatusNotification = { - validationRun: false, - projectBasePath, - reason: "non-import line hardhat error", - displayText: "Example error", - errorFile, - }; - - assert.deepStrictEqual( - sendNotification.args[0][1], - expectedFailureStatus - ); - }); - }); - }); - - describe("validator error", () => { - describe("initialization is still in progress", () => { - it("sends a failure status message", async () => - assertValidatorError("validator-starting", "validator starting")); - }); - - describe("initialization failed", () => { - it("sends a failure status message", async () => - assertValidatorError( - "validator-initialization-failed", - "unable to load hardhat config" - )); - }); - - describe("initialization failed", () => { - it("sends a failure status message", async () => - assertValidatorError( - "validator-in-unexpected-state", - "validator in unexpected state" - )); - }); - }); - - describe("job completion error", () => { - describe("directly imports incompatible file", () => { - it("sends a failure status message", async () => - assertJobCompletionError( - "directly-imports-incompatible-file", - "directly imports incompatible file" - )); - }); - - describe("incompatible overriden solc version", () => { - it("sends a failure status message", async () => - assertJobCompletionError( - "incompatible-overriden-solc-version", - "incompatible overriden solc version" - )); - }); - - describe("indirectly imports incompatible file", () => { - it("sends a failure status message", async () => - assertJobCompletionError( - "indirectly-imports-incompatible-file", - "indirectly imports incompatible file" - )); - }); - - describe("no compatibile solc version found", () => { - it("sends a failure status message", async () => - assertJobCompletionError( - "no-compatible-solc-version-found", - "no compatibile solc version found" - )); - }); - - describe("contract not in project", () => { - it("sends a failure status message", async () => - assertJobCompletionError( - "contract-not-in-project", - "not part of hardhat project" - )); - }); - - describe("unknown reason", () => { - it("sends a failure status message", async () => - assertJobCompletionError( - "unknown-failure-reason", - "unknown failure reason" - )); - }); - }); - - describe("unknown error", () => { - describe("node Error", () => { - let sendDiagnostics: sinon.SinonSpy; - let sendNotification: sinon.SinonSpy; - let logger: Logger; - - before(async () => { - sendDiagnostics = sinon.spy(); - sendNotification = sinon.spy(); - logger = setupMockLogger(); - - const workerReturnMessage: UnknownError = { - type: "VALIDATION_COMPLETE", - status: "UNKNOWN_ERROR", - jobId: 1, - projectBasePath: "/projects/example", - error: { message: "this is unexpected" }, - }; - - await validateReturningWorkerMessage(workerReturnMessage, { - sendDiagnosticsSpy: sendDiagnostics, - sendNotificationSpy: sendNotification, - mockLogger: logger, - }); - }); - - it("should clear diagnostics", async () => { - assert(sendDiagnostics.called); - assert.deepStrictEqual(sendDiagnostics.args[0][0], { - diagnostics: [], - uri: "/projects/example/contracts/first.sol", - }); - }); - - it("should log the error for triage", async () => { - // eslint-disable-next-line @typescript-eslint/no-explicit-any - assert((logger.error as any).called); - }); - - it("should indicate failure for the solidity status", () => { - assert(sendNotification.called); - assert.equal( - sendNotification.args[0][0], - "custom/validation-job-status" - ); - - const expectedFailureStatus: ValidationJobStatusNotification = { - validationRun: false, - projectBasePath: "/projects/example", - reason: "unknown", - displayText: "this is unexpected", - }; - - assert.deepStrictEqual( - sendNotification.args[0][1], - expectedFailureStatus - ); - }); - }); - - describe("non-node error", () => { - let sendDiagnostics: sinon.SinonSpy; - let sendNotification: sinon.SinonSpy; - let logger: Logger; - - before(async () => { - sendDiagnostics = sinon.spy(); - sendNotification = sinon.spy(); - logger = setupMockLogger(); - - const workerReturnMessage: UnknownError = { - type: "VALIDATION_COMPLETE", - status: "UNKNOWN_ERROR", - jobId: 1, - projectBasePath: "/projects/example", - error: "this is just a string", - }; - - await validateReturningWorkerMessage(workerReturnMessage, { - sendDiagnosticsSpy: sendDiagnostics, - sendNotificationSpy: sendNotification, - mockLogger: logger, - }); - }); - - it("should clear diagnostics", async () => { - assert(sendDiagnostics.called); - assert.deepStrictEqual(sendDiagnostics.args[0][0], { - diagnostics: [], - uri: "/projects/example/contracts/first.sol", - }); - }); - - it("should log the error for triage", async () => { - // eslint-disable-next-line @typescript-eslint/no-explicit-any - assert((logger.error as any).called); - }); - - it("should indicate failure for the solidity status", () => { - assert(sendNotification.called); - assert.equal( - sendNotification.args[0][0], - "custom/validation-job-status" - ); - - const expectedFailureStatus: ValidationJobStatusNotification = { - validationRun: false, - projectBasePath: "/projects/example", - reason: "unknown", - displayText: "internal error", - }; - - assert.deepStrictEqual( - sendNotification.args[0][1], - expectedFailureStatus - ); - }); - }); - }); - - describe("cancelled", () => { - let sendDiagnostics: sinon.SinonSpy; - let sendNotification: sinon.SinonSpy; - let logger: Logger; - - before(async () => { - sendDiagnostics = sinon.spy(); - sendNotification = sinon.spy(); - logger = setupMockLogger(); - - const workerReturnMessage: CancelledValidation = { - type: "VALIDATION_COMPLETE", - status: "CANCELLED", - jobId: 1, - projectBasePath: "/projects/example", - }; - - await validateReturningWorkerMessage(workerReturnMessage, { - sendDiagnosticsSpy: sendDiagnostics, - sendNotificationSpy: sendNotification, - mockLogger: logger, - }); - }); - - it("should leave diagnostics to a subsequent validation", async () => { - assert(sendDiagnostics.notCalled); - }); - - it("should only log for trace purposes", async () => { - // eslint-disable-next-line @typescript-eslint/no-explicit-any - assert((logger.trace as any).called); - }); - - it("should leave the solidity status to a subsequent validation", () => { - assert(sendNotification.notCalled); - }); - }); - - describe("unrecognized message", () => { - let sendDiagnostics: sinon.SinonSpy; - let sendNotification: sinon.SinonSpy; - let logger: Logger; - - it("should error as this is a coding issue", async () => { - sendDiagnostics = sinon.spy(); - sendNotification = sinon.spy(); - logger = setupMockLogger(); - - const workerReturnMessage = { - type: "VALIDATION_COMPLETE", - status: "MADE UP!!!", - jobId: 1, - projectBasePath: "/projects/example", - }; - - try { - // eslint-disable-next-line @typescript-eslint/no-explicit-any - await validateReturningWorkerMessage(workerReturnMessage as any, { - sendDiagnosticsSpy: sendDiagnostics, - sendNotificationSpy: sendNotification, - mockLogger: logger, - }); - } catch (err: unknown) { - if (!isError(err)) { - assert.fail("Should be Error"); - } - - // eslint-disable-next-line @typescript-eslint/no-explicit-any - assert.equal( - err.message, - "Unrecognized message status: MADE UP!!!" - ); - } - }); - }); - }); - - describe("bad server state", () => { - describe("no sol file entry", () => { - it("should log and ignore the validation message", async () => { - const mockLogger = setupMockLogger(); - - const serverState = { - solFileIndex: {}, - telemetry: setupMockTelemetry(), - logger: mockLogger, - // eslint-disable-next-line @typescript-eslint/no-explicit-any - } as any; - - const document = TextDocument.create( - "/projects/example/contracts/first.sol", - "solidity", - 0, - '//ignore\n import("./nonexistant.sol")' - ); - - await validate(serverState, { document }); - - // eslint-disable-next-line @typescript-eslint/no-explicit-any - const logError: any = mockLogger.error; - assert(logError.called); - - const calledWithError = logError.args[0][0]; - if (!isError(calledWithError)) { - assert.fail("Should be an error"); - } - - assert.equal( - calledWithError.message, - "Could not send to valiation process, uri is not indexed: /projects/example/contracts/first.sol" - ); - }); - }); - - describe("no worker process for project", () => { - it("should log and ignore the validation message", async () => { - const mockLogger = setupMockLogger(); - - const serverState = { - solFileIndex: { - "/projects/example/contracts/first.sol": { - uri: "/projects/example/contracts/first.sol", - text: "", - project: { type: "hardhat", basePath: "/projects/example" }, - }, - }, - workerProcesses: { - "/projects/example": undefined, - }, - logger: mockLogger, - telemetry: setupMockTelemetry(), - // eslint-disable-next-line @typescript-eslint/no-explicit-any - } as any; - - const document = TextDocument.create( - "/projects/example/contracts/first.sol", - "solidity", - 0, - '//ignore\n import("./nonexistant.sol")' - ); - - await validate(serverState, { document }); - - // eslint-disable-next-line @typescript-eslint/no-explicit-any - const logError: any = mockLogger.error; - assert(logError.called); - - const calledWithError = logError.args[0][0]; - if (!isError(calledWithError)) { - assert.fail("Should be an error"); - } - - assert.equal( - calledWithError.message, - "No worker process for project: /projects/example" - ); - }); - }); - }); - }); -}); - -function isError(err: unknown): err is Error { - return err instanceof Error; -} - -async function validateReturningWorkerMessage( - workerReturnMessage: ValidationCompleteMessage, - { - sendDiagnosticsSpy, - sendNotificationSpy, - mockLogger, - }: { - sendDiagnosticsSpy: sinon.SinonSpy; - sendNotificationSpy: sinon.SinonSpy; - mockLogger: Logger; - } -) { - const serverState = { - solFileIndex: { - "/projects/example/contracts/first.sol": { - uri: "/projects/example/contracts/first.sol", - text: "", - tracking: "TRACKED", - project: { type: "hardhat", basePath: "/projects/example" }, - }, - }, - workerProcesses: { - "/projects/example": { - validate: (): ValidationCompleteMessage => workerReturnMessage, - }, - }, - connection: { - sendDiagnostics: sendDiagnosticsSpy, - sendNotification: sendNotificationSpy, - }, - documents: { - get: () => - TextDocument.create( - "file:///projects/example/contracts/first.sol", - "solidity", - 0, - "// ignore" - ), - }, - telemetry: setupMockTelemetry(), - logger: mockLogger, - // eslint-disable-next-line @typescript-eslint/no-explicit-any - } as any; - - const document = TextDocument.create( - "/projects/example/contracts/first.sol", - "solidity", - 0, - '//ignore\n import("./nonexistant.sol")' - ); - - await validate(serverState, { document }); -} - -async function assertJobCompletionError( - reason: string, - expectedDisplayText: string -) { - return assertError( - { - type: "VALIDATION_COMPLETE", - status: "JOB_COMPLETION_ERROR", - jobId: 1, - projectBasePath: "/projects/example", - reason, - }, - { reason, displayText: expectedDisplayText } - ); -} - -async function assertValidatorError( - reason: string, - expectedDisplayText: string -) { - return assertError( - { - type: "VALIDATION_COMPLETE", - status: "VALIDATOR_ERROR", - jobId: 1, - projectBasePath: "/projects/example", - reason, - }, - { reason, displayText: expectedDisplayText } - ); -} - -async function assertError( - workerReturnMessage: JobCompletionError | ValidatorError, - expected: { reason: string; displayText: string } -) { - const sendDiagnostics = sinon.spy(); - const sendNotification = sinon.spy(); - const logger = setupMockLogger(); - - await validateReturningWorkerMessage(workerReturnMessage, { - sendDiagnosticsSpy: sendDiagnostics, - sendNotificationSpy: sendNotification, - mockLogger: logger, - }); - - assert(sendDiagnostics.called); - assert.deepStrictEqual(sendDiagnostics.args[0][0], { - diagnostics: [], - uri: "/projects/example/contracts/first.sol", - }); - - assert(sendNotification.called); - assert.equal(sendNotification.args[0][0], "custom/validation-job-status"); - - const expectedFailureStatus: ValidationJobStatusNotification = { - validationRun: false, - projectBasePath: "/projects/example", - reason: expected.reason, - displayText: expected.displayText, - }; - - assert.deepStrictEqual(sendNotification.args[0][1], expectedFailureStatus); -} diff --git a/server/test/services/validation/worker.ts b/server/test/services/validation/worker.ts deleted file mode 100644 index e2fe7b0e..00000000 --- a/server/test/services/validation/worker.ts +++ /dev/null @@ -1,1542 +0,0 @@ -/* eslint-disable @typescript-eslint/no-explicit-any */ -import { dispatch } from "@services/validation/worker/dispatch"; -import { assert } from "chai"; -import sinon from "sinon"; -import { HardhatError as FrameworkHardhatError } from "hardhat/internal/core/errors"; -import type { ErrorDescriptor } from "hardhat/internal/core/errors-list"; -import type { SolcBuild } from "hardhat/types"; -import type { SolcInput } from "@services/validation/worker/build/buildInputsToSolc"; -import type { - InvalidatePreprocessingCacheMessage, - ValidateCommand, - WorkerState, -} from "../../../src/types"; - -describe("worker", () => { - describe("validation job", () => { - const exampleValidation: ValidateCommand = { - type: "VALIDATE", - jobId: 1, - projectBasePath: "/projects/example", - uri: "/projects/example/contracts/first.sol", - documentText: - "// SPDX-License-Identifier: GPL-3.0\npragma solidity >=0.8.2 <0.9.0;", - openDocuments: [], - }; - - const exampleError = { - component: "general", - errorCode: "7920", - formattedMessage: - "DeclarationError: Identifier not found or not unique.\n" + - " --> contracts/Hover/HoverErrors.sol:11:22:\n" + - " |\n" + - "11 | type UserType is uint256a;\n" + - " | ^^^^^^^^\n" + - "\n", - message: "Identifier not found or not unique.", - severity: "error", - sourceLocation: { - file: "contracts/Hover/HoverErrors.sol", - start: 214, - end: 222, - }, - type: "DeclarationError", - }; - - describe("completes", () => { - describe("without solc warnings/errors", () => { - let workerState: WorkerState; - let send: any; - let capturedOptions: any; - - before(async () => { - const errors: unknown[] = []; - - workerState = setupWorkerState({ errors }); - - workerState.hre = setupMockHre({ - errors: [], - interleavedActions: { - TASK_COMPILE_SOLIDITY_RUN_SOLC: async (options) => { - capturedOptions = options; - }, - }, - }); - - await dispatch(workerState)(exampleValidation); - - send = workerState.send; - }); - - it("should return 0 warnings/errors for the file", async () => { - assert(send.called); - assert.deepStrictEqual(send.args[0][0], { - type: "VALIDATION_COMPLETE", - status: "VALIDATION_PASS", - jobId: 1, - projectBasePath: "/projects/example", - version: "0.8.0", - sources: [ - "/projects/example/contracts/first.sol", - "/projects/example/contracts/second.sol", - ], - }); - }); - - it("should populate the compiler metadata cache", async () => { - assert("0.8.0" in workerState.compilerMetadataCache); - - const buildInfoPromise = await workerState.compilerMetadataCache[ - "0.8.0" - ]; - - assert.deepStrictEqual(buildInfoPromise, { - compilerPath: - "/projects/example/node_modules/hardhat/compilers/compiler1", - isSolcJs: false, - version: "0.8.0", - longVersion: "0.8.0", - }); - }); - - it("should pass overriden `outputSelection` settings to solc", async () => { - assert.deepStrictEqual(capturedOptions.input.settings, { - optimizer: { - enabled: false, - runs: 200, - }, - outputSelection: {}, - }); - }); - - it("should set the solc input cache", async () => { - assert.deepStrictEqual(workerState.previousChangedDocAnalysis, { - uri: "/projects/example/contracts/first.sol", - analysis: { imports: [], versionPragmas: [">=0.8.2 <0.9.0"] }, - }); - - assert.isDefined(workerState.previousSolcInput); - }); - }); - - describe("with solc warnings/errors", () => { - it("should return warnings/errors", async () => { - const errors = [exampleError]; - - const workerState = setupWorkerState({ errors }); - - await dispatch(workerState)(exampleValidation); - - const send = workerState.send as any; - - assert(send.called); - assert.deepStrictEqual(send.args[0][0], { - type: "VALIDATION_COMPLETE", - status: "VALIDATION_FAIL", - jobId: 1, - projectBasePath: "/projects/example", - version: "0.8.0", - errors: [exampleError], - }); - }); - - it("should clear the preprocessing cache if an import line error is returned", async () => { - const importLineError = { ...exampleError, errorCode: "6275" }; - - const errors = [importLineError]; - - const workerState = setupWorkerState({ errors }); - - workerState.previousChangedDocAnalysis = { - uri: "example.sol", - analysis: { imports: [], versionPragmas: [] }, - }; - workerState.previousSolcInput = {} as any; - - await dispatch(workerState)(exampleValidation); - - const send = workerState.send as any; - - assert(send.called); - assert.deepStrictEqual(send.args[0][0], { - type: "VALIDATION_COMPLETE", - status: "VALIDATION_FAIL", - jobId: 1, - projectBasePath: "/projects/example", - version: "0.8.0", - errors: [importLineError], - }); - - assert.isUndefined(workerState.previousChangedDocAnalysis); - assert.isUndefined(workerState.previousSolcInput); - }); - }); - - describe("with open editor files", () => { - let workerState: WorkerState; - let capturedOptions: any; - - before(async () => { - workerState = setupWorkerState({ errors: [] }); - - workerState.hre = setupMockHre({ - errors: [], - interleavedActions: { - TASK_COMPILE_SOLIDITY_RUN_SOLC: async (options) => { - capturedOptions = options; - }, - }, - }); - - await dispatch(workerState)({ - ...exampleValidation, - openDocuments: [ - { - uri: "/projects/example/contracts/first.sol", - documentText: "// expected", - }, - ], - }); - }); - - it("should pass the overriden files to solc", async () => { - assert.deepStrictEqual(capturedOptions.input.sources, { - "contracts/first.sol": { - content: "// expected", - }, - }); - }); - - it("should override the read file task to read from open docs", async () => { - const overwrittenNewAction = (workerState.hre as any).newAction; - - assert.isDefined(overwrittenNewAction); - - const openDocCotent = await overwrittenNewAction({ - absolutePath: "/projects/example/contracts/first.sol", - }); - - assert.equal(openDocCotent, "// expected"); - - const fromDiskContent = await overwrittenNewAction({ - absolutePath: "/ondisk.sol", - }); - - assert.equal(fromDiskContent, "Read from disk: /ondisk.sol"); - }); - }); - - describe("with cached compiler metadata", () => { - it("should not call `TASK_COMPILE_SOLIDITY_GET_SOLC_BUILD`", async () => { - const workerState = setupWorkerState({ errors: [] }); - - workerState.compilerMetadataCache = { - "0.8.0": new Promise((resolve) => { - const solcBuild: SolcBuild = { - version: "0.8.0", - longVersion: "0.8.0", - compilerPath: - "/projects/example/node_modules/hardhat/compilers/compiler1", - isSolcJs: false, - }; - resolve(solcBuild); - }), - }; - - let solcBuildCalled = false; - - workerState.hre = setupMockHre({ - errors: [], - interleavedActions: { - TASK_COMPILE_SOLIDITY_GET_SOLC_BUILD: async () => { - solcBuildCalled = true; - }, - }, - }); - - await dispatch(workerState)(exampleValidation); - - assert( - !solcBuildCalled, - "Solc build should not have been called, the cache should have been used" - ); - }); - }); - - describe("with cached solc input", () => { - describe("matching the current uri", () => { - let solcCompileCalled: boolean; - let workerState: WorkerState; - - before(async () => { - workerState = setupWorkerState({ errors: [] }); - - workerState.hre = setupMockHre({ - errors: [], - interleavedActions: { - TASK_COMPILE_SOLIDITY_RUN_SOLC: async () => { - solcCompileCalled = true; - }, - }, - }); - - workerState.previousChangedDocAnalysis = { - uri: "/projects/example/contracts/first.sol", - analysis: { imports: [], versionPragmas: [">=0.8.2 <0.9.0"] }, - }; - workerState.previousSolcInput = { - input: { - sources: { - "contracts/first.sol": { content: "NOT OVERWRITTEN" }, - }, - }, - } as any; - - await dispatch(workerState)(exampleValidation); - }); - - it("should not call `solc compile`", async () => { - assert( - !solcCompileCalled, - "Solc compile should not have been called, the cache should have been used" - ); - }); - - it("overwrites the solc input", async () => { - assert.deepStrictEqual( - workerState.previousSolcInput?.input.sources[ - "contracts/first.sol" - ], - { - content: - "// SPDX-License-Identifier: GPL-3.0\npragma solidity >=0.8.2 <0.9.0;", - } - ); - }); - }); - - describe("not matching the current uri", () => { - let solcCompileCalled: boolean; - let workerState: WorkerState; - - before(async () => { - workerState = setupWorkerState({ errors: [] }); - - workerState.hre = setupMockHre({ - errors: [], - interleavedActions: { - TASK_COMPILE_SOLIDITY_RUN_SOLC: async () => { - solcCompileCalled = true; - }, - }, - }); - - workerState.previousChangedDocAnalysis = { - uri: "/projects/example/contracts/some_other.sol", - analysis: { imports: [], versionPragmas: [">=0.8.2 <0.9.0"] }, - }; - - await dispatch(workerState)(exampleValidation); - }); - - it("falls back to `solc compile`", async () => { - assert( - solcCompileCalled, - "Solc compile should have been called, the cache doesn't match" - ); - }); - }); - }); - - describe("with invalid cached solc input", () => { - let solcCompileCalled: boolean; - let workerState: WorkerState; - let originalSolcInput: SolcInput; - - before(async () => { - workerState = setupWorkerState({ errors: [] }); - - originalSolcInput = Object.freeze({ - input: { - sources: { - invalid: { content: "invalid" }, - }, - }, - } as any); - - workerState.hre = setupMockHre({ - errors: [], - interleavedActions: { - TASK_COMPILE_SOLIDITY_RUN_SOLC: async () => { - solcCompileCalled = true; - }, - }, - }); - - workerState.previousChangedDocAnalysis = { - uri: "/projects/example/contracts/first.sol", - analysis: { imports: [], versionPragmas: [">=0.8.2 <0.9.0"] }, - }; - workerState.previousSolcInput = originalSolcInput; - - await dispatch(workerState)(exampleValidation); - }); - - it("should fall back on `solc compile`", async () => { - assert( - solcCompileCalled, - "Solc compile should have been called as the cache was invalid" - ); - }); - - it("logs the error", async () => { - assert((workerState.logger.error as any).called); - }); - }); - }); - - describe("errors", () => { - describe("with a preprocess failure", () => { - describe("hardhat error", () => { - it("should return hardhat error", async () => { - const exampleErrorDescriptor: ErrorDescriptor = { - number: 123, - message: "error message", - title: "Example error", - description: "This is an example error", - shouldBeReported: false, - }; - - const throwOnDepGraph = () => { - throw new FrameworkHardhatError(exampleErrorDescriptor); - }; - - const workerState = setupWorkerState({ - errors: [], - throwOnDepGraph, - }); - - await dispatch(workerState)(exampleValidation); - - const send = workerState.send as any; - - assert(send.called); - - const { hardhatError, ...sentMessage } = send.args[0][0]; - - assert.deepStrictEqual(sentMessage, { - type: "VALIDATION_COMPLETE", - status: "HARDHAT_ERROR", - jobId: 1, - projectBasePath: "/projects/example", - }); - - assert.deepStrictEqual(hardhatError, { - name: "HardhatError", - messageArguments: {}, - errorDescriptor: exampleErrorDescriptor, - }); - }); - }); - - describe("non-hardhat error", () => { - it("should return an unknown error", async () => { - const workerState = setupWorkerState({ - errors: [], - throwOnDepGraph: () => { - throw new Error("Non-hardhat error"); - }, - }); - - await dispatch(workerState)(exampleValidation); - - const send = workerState.send as any; - - assert(send.called); - - const { error, ...sentMessage } = send.args[0][0]; - - assert.deepStrictEqual(sentMessage, { - type: "VALIDATION_COMPLETE", - status: "UNKNOWN_ERROR", - jobId: 1, - projectBasePath: "/projects/example", - }); - - assert.deepStrictEqual(error.message, "Non-hardhat error"); - assert.deepStrictEqual(error.name, "Error"); - }); - }); - - describe("exception", () => { - it("should clear the worker state", async () => { - const workerState = setupWorkerState({ - errors: [], - }); - - workerState.buildJobs = { - bad: { - uri: "bad", - jobId: 99, - openDocuments: [], - documentText: "// bad", - added: new Date(), - projectBasePath: "/bad", - }, - }; - - await dispatch(workerState)(exampleValidation); - - assert.equal(workerState.current, null); - assert.deepStrictEqual(workerState.buildJobs, {}); - assert.deepStrictEqual(workerState.buildQueue, []); - assert.deepStrictEqual(workerState.compilerMetadataCache, {}); - assert.equal(workerState.previousSolcInput, undefined); - assert.equal(workerState.previousChangedDocAnalysis, undefined); - }); - - it("should ignore an issue with send", async () => { - const workerState = setupWorkerState({ - errors: [], - }); - - workerState.send = () => { - throw new Error("Send failed for this message"); - }; - - workerState.buildJobs = { - bad: { - uri: "bad", - jobId: 99, - openDocuments: [], - documentText: "// bad", - added: new Date(), - projectBasePath: "/bad", - }, - }; - - await dispatch(workerState)(exampleValidation); - - assert((workerState.logger.error as any).calledTwice); - }); - }); - - describe("build (compiler download) error", () => { - let workerState: WorkerState; - - before(async () => { - workerState = setupWorkerState({ errors: [] }); - - workerState.hre = setupMockHre({ - errors: [], - interleavedActions: { - TASK_COMPILE_SOLIDITY_GET_SOLC_BUILD: async () => { - throw new Error("Could not download compiler"); - }, - }, - }); - - await dispatch(workerState)(exampleValidation); - }); - - it("should send an error", async () => { - const send = workerState.send as any; - - assert(send.called); - - const { error, ...sentMessage } = send.args[0][0]; - - assert.deepStrictEqual(sentMessage, { - type: "VALIDATION_COMPLETE", - status: "UNKNOWN_ERROR", - jobId: 1, - projectBasePath: "/projects/example", - }); - - assert.deepStrictEqual( - error.message, - "Could not download compiler" - ); - - assert.deepStrictEqual(error.name, "Error"); - }); - - it("should clear the compiler metadata cache", () => { - assert(workerState.compilerMetadataCache["0.8.0"] === undefined); - }); - }); - }); - - describe("compilation input job returned reason", () => { - it("should return hardhat error", async () => { - const workerState = setupWorkerState({ - errors: [], - compilationJob: { - reason: "incompatible-overriden-solc-version", - }, - }); - - await dispatch(workerState)(exampleValidation); - - const send = workerState.send as any; - - assert(send.called); - - const sentMessage = send.args[0][0]; - - assert.deepStrictEqual(sentMessage, { - type: "VALIDATION_COMPLETE", - status: "JOB_COMPLETION_ERROR", - jobId: 1, - projectBasePath: "/projects/example", - reason: "incompatible-overriden-solc-version", - }); - }); - }); - - describe("into the catch all", () => { - it("should return an unknown error", async () => { - const workerState = setupWorkerState({ - errors: [], - getResolvedFiles: () => { - throw new Error("Non-hardhat error"); - }, - }); - - await dispatch(workerState)(exampleValidation); - - const send = workerState.send as any; - - assert(send.called); - - const { error, ...sentMessage } = send.args[0][0]; - - assert.deepStrictEqual(sentMessage, { - type: "VALIDATION_COMPLETE", - status: "UNKNOWN_ERROR", - jobId: 1, - projectBasePath: "/projects/example", - }); - - assert.deepStrictEqual(error.message, "Non-hardhat error"); - assert.deepStrictEqual(error.name, "Error"); - }); - }); - }); - - describe("cancel", () => { - // Setup the empty worker state, but - // expose a promise/resolve pair that allows - // us to "pause" the first messages build on the - // `TASK_COMPILE_SOLIDITY_GET_SOURCE_PATHS` step. - // We can then dispatch a second change to - // completion, and finally unpause the first. - it("should return a cancelled build job if one further change on same uri", async () => { - // Arrange - const workerState = setupWorkerState({ - errors: [], - }); - - const { - function: startEndFunc, - startPromise: getSourcesStartedPromise, - finishResolve: resolveGetSourcesFinished, - } = setupPausableFunction(); - - workerState.hre = setupMockHre({ - errors: [], - interleavedActions: { - TASK_COMPILE_SOLIDITY_GET_SOURCE_PATHS: startEndFunc, - }, - }); - - // Act - send first change - const dispatchPromise = dispatch(workerState)({ - ...exampleValidation, - jobId: 1, - uri: "/projects/example/contracts/first.sol", - }); - - // Pause on `TASK_COMPILE_SOLIDITY_GET_SOURCE_PATHS` - await getSourcesStartedPromise; - - if (workerState.current === null) { - return assert.fail("build is not in progress"); - } - - // Send the second change - await dispatch(workerState)({ - ...exampleValidation, - jobId: 2, - uri: "/projects/example/contracts/first.sol", - }); - - // Unpause `TASK_COMPILE_SOLIDITY_GET_SOURCE_PATHS` - resolveGetSourcesFinished(); - - // Complete the first change message - await dispatchPromise; - - // Assert - const send = workerState.send as any; - - assert(send.called); - - // First call cancelled - const firstJobValidationMessage = send.args[0][0]; - assert.deepStrictEqual(firstJobValidationMessage, { - type: "VALIDATION_COMPLETE", - status: "CANCELLED", - jobId: 1, - projectBasePath: "/projects/example", - }); - - // Second call completes and validates - const secondJobValidationMessage = send.args[1][0]; - assert.deepStrictEqual(secondJobValidationMessage, { - type: "VALIDATION_COMPLETE", - status: "VALIDATION_PASS", - jobId: 2, - projectBasePath: "/projects/example", - version: "0.8.0", - sources: [ - "/projects/example/contracts/first.sol", - "/projects/example/contracts/second.sol", - ], - }); - }); - - // Setup a validation job that pauses twice, - // once on `TASK_COMPILE_SOLIDITY_GET_DEPENDENCY_GRAPH` and - // again on `TASK_COMPILE_SOLIDITY_COMPILE`. - // In each pause a further change message is sent. - it("should return 2 cancelled build jobs if two further changes on same uri", async () => { - // Arrange - const workerState = setupWorkerState({ - errors: [], - }); - - const { - function: getDependencyGraph, - startPromise: getDepenencyGraphStartPromise, - finishResolve: resolveGetDepenencyGraphFinished, - } = setupPausableFunction(); - - const { - function: runSolc, - startPromise: runSolcStartPromise, - finishResolve: runSolcFinished, - } = setupPausableFunction(); - - workerState.hre = setupMockHre({ - errors: [], - interleavedActions: { - TASK_COMPILE_SOLIDITY_GET_DEPENDENCY_GRAPH: getDependencyGraph, - TASK_COMPILE_SOLIDITY_RUN_SOLC: runSolc, - }, - }); - - // Act - send first change - const dispatchPromise = dispatch(workerState)({ - ...exampleValidation, - jobId: 1, - uri: "/projects/example/contracts/first.sol", - }); - - // Pause on `TASK_COMPILE_SOLIDITY_GET_DEPENDENCY_GRAPH` - await getDepenencyGraphStartPromise; - - // Send the second change - await dispatch(workerState)({ - ...exampleValidation, - jobId: 2, - uri: "/projects/example/contracts/first.sol", - }); - - // Unpause `TASK_COMPILE_SOLIDITY_GET_DEPENDENCY_GRAPH` - resolveGetDepenencyGraphFinished(); - - // Pause on `TASK_COMPILE_SOLIDITY_COMPILE` - await runSolcStartPromise; - - // Send the third change - await dispatch(workerState)({ - ...exampleValidation, - jobId: 3, - uri: "/projects/example/contracts/first.sol", - }); - - // Unpause on `TASK_COMPILE_SOLIDITY_COMPILE` - runSolcFinished(); - - // Complete the first change message - await dispatchPromise; - - // Assert - const send = workerState.send as any; - - assert(send.called); - - // First call cancelled - const firstJobValidationMessage = send.args[0][0]; - assert.deepStrictEqual(firstJobValidationMessage, { - type: "VALIDATION_COMPLETE", - status: "CANCELLED", - jobId: 1, - projectBasePath: "/projects/example", - }); - - // Second call cancelled - const secondJobValidationMessage = send.args[1][0]; - assert.deepStrictEqual(secondJobValidationMessage, { - type: "VALIDATION_COMPLETE", - status: "CANCELLED", - jobId: 2, - projectBasePath: "/projects/example", - }); - - // Third call completes and validates - const thirdJobValidationMessage = send.args[2][0]; - assert.deepStrictEqual(thirdJobValidationMessage, { - type: "VALIDATION_COMPLETE", - status: "VALIDATION_PASS", - jobId: 3, - projectBasePath: "/projects/example", - version: "0.8.0", - sources: [ - "/projects/example/contracts/first.sol", - "/projects/example/contracts/second.sol", - ], - }); - }); - - it("should immediately cancel if change already in queue", async () => { - // Arrange - const workerState = setupWorkerState({ - errors: [], - }); - - const { - function: getDependencyGraph, - startPromise: getDepenencyGraphStartPromise, - finishResolve: resolveGetDepenencyGraphFinished, - } = setupPausableFunction(); - - workerState.hre = setupMockHre({ - errors: [], - interleavedActions: { - TASK_COMPILE_SOLIDITY_GET_DEPENDENCY_GRAPH: getDependencyGraph, - }, - }); - - // Act - send first change - const dispatchPromise = dispatch(workerState)({ - ...exampleValidation, - jobId: 1, - uri: "/projects/example/contracts/first.sol", - }); - - // Pause on `TASK_COMPILE_SOLIDITY_GET_DEPENDENCY_GRAPH` - await getDepenencyGraphStartPromise; - - // Send the second change - await dispatch(workerState)({ - ...exampleValidation, - jobId: 2, - uri: "/projects/example/contracts/first.sol", - }); - - // Send the third change - await dispatch(workerState)({ - ...exampleValidation, - jobId: 3, - uri: "/projects/example/contracts/first.sol", - }); - - // Unpause `TASK_COMPILE_SOLIDITY_GET_DEPENDENCY_GRAPH` - resolveGetDepenencyGraphFinished(); - - // Complete the first change message - await dispatchPromise; - - // Assert - const send = workerState.send as any; - - assert(send.called); - - // First call cancelled - const firstJobValidationMessage = send.args[0][0]; - assert.deepStrictEqual(firstJobValidationMessage, { - type: "VALIDATION_COMPLETE", - status: "CANCELLED", - jobId: 2, - projectBasePath: "/projects/example", - }); - - // Second call cancelled - const secondJobValidationMessage = send.args[1][0]; - assert.deepStrictEqual(secondJobValidationMessage, { - type: "VALIDATION_COMPLETE", - status: "CANCELLED", - jobId: 1, - projectBasePath: "/projects/example", - }); - - // Third call completes and validates - const thirdJobValidationMessage = send.args[2][0]; - assert.deepStrictEqual(thirdJobValidationMessage, { - type: "VALIDATION_COMPLETE", - status: "VALIDATION_PASS", - jobId: 3, - projectBasePath: "/projects/example", - version: "0.8.0", - sources: [ - "/projects/example/contracts/first.sol", - "/projects/example/contracts/second.sol", - ], - }); - }); - - // Setup a validation job that pauses on `TASK_COMPILE_SOLIDITY_GET_SOURCE_NAMES` - // to allow a second change on a different uri. Both should complete and validate. - it("should return two complete messages if two changes on different uris", async () => { - // Arrange - const workerState = setupWorkerState({ - errors: [], - }); - - const { - function: pausableFunc, - startPromise: getSourceNamesPromise, - finishResolve: resolveGetSourceNamesFinished, - } = setupPausableFunction(); - - workerState.hre = setupMockHre({ - errors: [], - interleavedActions: { - TASK_COMPILE_SOLIDITY_GET_SOURCE_NAMES: pausableFunc, - }, - }); - - // Act - send first change - const dispatchPromise = dispatch(workerState)({ - ...exampleValidation, - jobId: 1, - uri: "/projects/example/contracts/first.sol", - }); - - // Pause on `TASK_COMPILE_SOLIDITY_GET_SOURCE_NAMES` - await getSourceNamesPromise; - - if (workerState.current === null) { - return assert.fail("build is not in progress"); - } - - // Send the second change - await dispatch(workerState)({ - ...exampleValidation, - jobId: 2, - uri: "/projects/example/contracts/second.sol", - }); - - // Unpause `TASK_COMPILE_SOLIDITY_GET_SOURCE_NAMES` - resolveGetSourceNamesFinished(); - - // Complete the first change message - await dispatchPromise; - - // Assert - const send = workerState.send as any; - - assert(send.called); - - // First call completes and validates - const firstJobValidationMessage = send.args[0][0]; - assert.deepStrictEqual(firstJobValidationMessage, { - type: "VALIDATION_COMPLETE", - status: "VALIDATION_PASS", - jobId: 1, - projectBasePath: "/projects/example", - version: "0.8.0", - sources: [ - "/projects/example/contracts/first.sol", - "/projects/example/contracts/second.sol", - ], - }); - - // Second call completes and validates - const secondJobValidationMessage = send.args[1][0]; - assert.deepStrictEqual(secondJobValidationMessage, { - type: "VALIDATION_COMPLETE", - status: "VALIDATION_PASS", - jobId: 2, - projectBasePath: "/projects/example", - version: "0.8.0", - sources: [ - "/projects/example/contracts/first.sol", - "/projects/example/contracts/second.sol", - ], - }); - }); - - it("should cancel on `TASK_COMPILE_SOLIDITY_GET_SOURCE_PATHS` error", () => - assertCancelOnFailureOf("TASK_COMPILE_SOLIDITY_GET_SOURCE_PATHS")); - - it("should cancel on `TASK_COMPILE_SOLIDITY_GET_SOURCE_NAMES` error", () => - assertCancelOnFailureOf("TASK_COMPILE_SOLIDITY_GET_SOURCE_NAMES")); - - it("should cancel on `TASK_COMPILE_SOLIDITY_GET_DEPENDENCY_GRAPH` error", () => - assertCancelOnFailureOf("TASK_COMPILE_SOLIDITY_GET_DEPENDENCY_GRAPH")); - - it("should cancel on `TASK_COMPILE_SOLIDITY_GET_COMPILATION_JOB_FOR_FILE` error", () => - assertCancelOnFailureOf( - "TASK_COMPILE_SOLIDITY_GET_COMPILATION_JOB_FOR_FILE" - )); - - it("should cancel on `TASK_COMPILE_SOLIDITY_GET_COMPILER_INPUT` error", () => - assertCancelOnFailureOf("TASK_COMPILE_SOLIDITY_GET_COMPILER_INPUT")); - - it("should cancel on `TASK_COMPILE_SOLIDITY_GET_SOLC_BUILD` error", () => - assertCancelOnFailureOf("TASK_COMPILE_SOLIDITY_GET_SOLC_BUILD")); - - it("should cancel on `TASK_COMPILE_SOLIDITY_RUN_SOLCJS` error", () => - assertCancelOnFailureOf("TASK_COMPILE_SOLIDITY_RUN_SOLCJS", { - isSolcJs: true, - })); - - it("should return a cancelled build job if reading the file cache errors", async () => { - const { - function: startEndFunc, - startPromise: getReadFromFilePromise, - finishResolve: resolveReadFromFile, - } = setupPausableFunction(); - - // Arrange - const workerState = setupWorkerState({ - errors: [], - readFromFile: async () => { - await startEndFunc(); - - return { - _cache: { - _format: "hh-sol-cache-2", - files: {}, - }, - }; - }, - }); - - // Act - send first change - const dispatchPromise = dispatch(workerState)({ - ...exampleValidation, - jobId: 1, - uri: "/projects/example/contracts/first.sol", - }); - - // Pause on read from cache - await getReadFromFilePromise; - - if (workerState.current === null) { - return assert.fail("build is not in progress"); - } - - // Send the second change - await dispatch(workerState)({ - ...exampleValidation, - jobId: 2, - uri: "/projects/example/contracts/first.sol", - }); - - // Unpause read from cache - resolveReadFromFile(); - - // Complete the first change message - await dispatchPromise; - - // Assert - const send = workerState.send as any; - - assert(send.called); - - // First call cancelled - const firstJobValidationMessage = send.args[0][0]; - assert.deepStrictEqual(firstJobValidationMessage, { - type: "VALIDATION_COMPLETE", - status: "CANCELLED", - jobId: 1, - projectBasePath: "/projects/example", - }); - - // Second call completes and validates - const secondJobValidationMessage = send.args[1][0]; - assert.deepStrictEqual(secondJobValidationMessage, { - type: "VALIDATION_COMPLETE", - status: "VALIDATION_PASS", - jobId: 2, - projectBasePath: "/projects/example", - version: "0.8.0", - sources: [ - "/projects/example/contracts/first.sol", - "/projects/example/contracts/second.sol", - ], - }); - }); - }); - }); - - describe("invalidate preprocessing cache", () => { - let workerState: WorkerState; - - const exampleInvalidateMessage: InvalidatePreprocessingCacheMessage = { - type: "INVALIDATE_PREPROCESSING_CACHE", - }; - - before(async () => { - workerState = setupWorkerState({ errors: [] }); - - workerState.previousChangedDocAnalysis = { - uri: "/projects/example/contracts/first.sol", - analysis: { - versionPragmas: ["0.8.0"], - imports: ["./example.sol"], - }, - }; - - workerState.previousSolcInput = { fake: "input" } as any; - - await dispatch(workerState)(exampleInvalidateMessage); - }); - - it("should clear the preprocessing cache", async () => { - assert.equal(workerState.previousChangedDocAnalysis, undefined); - assert.equal(workerState.previousSolcInput, undefined); - }); - }); -}); - -function setupWorkerState({ - errors, - throwOnDepGraph, - getResolvedFiles, - readFromFile, - compilationJob, -}: { - errors: unknown[]; - throwOnDepGraph?: () => void; - getResolvedFiles?: () => string[]; - readFromFile?: (path: string) => any; - compilationJob?: any; -}) { - const mockLogger = { - log: sinon.spy(), - error: sinon.spy(), - trace: sinon.spy(), - }; - - const mockHre = setupMockHre({ - errors, - throwOnDepGraph, - getResolvedFiles, - compilationJob, - }); - - const workerState: WorkerState = { - current: null, - buildQueue: [], - buildJobs: {}, - hre: mockHre, - solidityFilesCachePath: "/cache", - originalReadFileAction: async ({ - absolutePath, - }: { - absolutePath: string; - }) => { - return `Read from disk: ${absolutePath}`; - }, - SolidityFilesCache: { - readFromFile: - readFromFile ?? - (() => ({ - _cache: { - _format: "hh-sol-cache-2", - files: {}, - }, - })), - }, - tasks: { - TASK_COMPILE_SOLIDITY_GET_SOURCE_PATHS: - "TASK_COMPILE_SOLIDITY_GET_SOURCE_PATHS", - TASK_COMPILE_SOLIDITY_GET_SOURCE_NAMES: - "TASK_COMPILE_SOLIDITY_GET_SOURCE_NAMES", - TASK_COMPILE_SOLIDITY_GET_DEPENDENCY_GRAPH: - "TASK_COMPILE_SOLIDITY_GET_DEPENDENCY_GRAPH", - TASK_COMPILE_SOLIDITY_GET_COMPILATION_JOB_FOR_FILE: - "TASK_COMPILE_SOLIDITY_GET_COMPILATION_JOB_FOR_FILE", - TASK_COMPILE_SOLIDITY_GET_COMPILER_INPUT: - "TASK_COMPILE_SOLIDITY_GET_COMPILER_INPUT", - TASK_COMPILE_SOLIDITY_GET_SOLC_BUILD: - "TASK_COMPILE_SOLIDITY_GET_SOLC_BUILD", - TASK_COMPILE_SOLIDITY_RUN_SOLCJS: "TASK_COMPILE_SOLIDITY_RUN_SOLCJS", - TASK_COMPILE_SOLIDITY_RUN_SOLC: "TASK_COMPILE_SOLIDITY_RUN_SOLC", - TASK_COMPILE_SOLIDITY_READ_FILE: "TASK_COMPILE_SOLIDITY_READ_FILE", - }, - compilerMetadataCache: {}, - send: sinon.spy(), - logger: mockLogger, - }; - - return workerState; -} - -function setupMockHre({ - errors, - throwOnDepGraph, - getResolvedFiles, - interleavedActions, - compilationJob, - isSolcJs = false, -}: { - errors: unknown[]; - throwOnDepGraph?: () => void; - getResolvedFiles?: () => string[]; - compilationJob?: any; - interleavedActions?: { - // eslint-disable-next-line @typescript-eslint/naming-convention - TASK_COMPILE_SOLIDITY_GET_SOURCE_PATHS?: () => Promise; - // eslint-disable-next-line @typescript-eslint/naming-convention - TASK_COMPILE_SOLIDITY_GET_SOURCE_NAMES?: () => Promise; - // eslint-disable-next-line @typescript-eslint/naming-convention - TASK_COMPILE_SOLIDITY_GET_DEPENDENCY_GRAPH?: () => Promise; - // eslint-disable-next-line @typescript-eslint/naming-convention - TASK_COMPILE_SOLIDITY_GET_COMPILATION_JOB_FOR_FILE?: () => Promise; - // eslint-disable-next-line @typescript-eslint/naming-convention - TASK_COMPILE_SOLIDITY_GET_COMPILER_INPUT?: () => Promise; - // eslint-disable-next-line @typescript-eslint/naming-convention - TASK_COMPILE_SOLIDITY_GET_SOLC_BUILD?: () => Promise; - // eslint-disable-next-line @typescript-eslint/naming-convention - TASK_COMPILE_SOLIDITY_RUN_SOLCJS?: (options: unknown) => Promise; - // eslint-disable-next-line @typescript-eslint/naming-convention - TASK_COMPILE_SOLIDITY_RUN_SOLC?: (options: unknown) => Promise; - }; - isSolcJs?: boolean; -}) { - const compilationFiles: Array<{ - absolutePath: string; - content: { rawContent: string }; - }> = [ - { - absolutePath: "/projects/example/contracts/first.sol", - content: { rawContent: "// SPDX-License-Identifier: GPL-3.0" }, - }, - ]; - - const mockHre = { - tasks: { - TASK_COMPILE_SOLIDITY_READ_FILE: { - setAction: (newAction: unknown) => { - return (mockHre.newAction = newAction); - }, - }, - }, - - run: async (param: unknown, passedOptions: unknown) => { - if (param === "TASK_COMPILE_SOLIDITY_GET_SOURCE_PATHS") { - if ( - interleavedActions?.TASK_COMPILE_SOLIDITY_GET_SOURCE_PATHS !== - undefined - ) { - await interleavedActions?.TASK_COMPILE_SOLIDITY_GET_SOURCE_PATHS(); - } - - return [ - "/projects/example/contracts/first.sol", - "/projects/example/contracts/second.sol", - ]; - } - - if (param === "TASK_COMPILE_SOLIDITY_GET_SOURCE_NAMES") { - if ( - interleavedActions?.TASK_COMPILE_SOLIDITY_GET_SOURCE_NAMES !== - undefined - ) { - await interleavedActions?.TASK_COMPILE_SOLIDITY_GET_SOURCE_NAMES(); - } - - return ["contracts/first.sol", "contracts/second.sol"]; - } - - if (param === "TASK_COMPILE_SOLIDITY_GET_DEPENDENCY_GRAPH") { - if (throwOnDepGraph !== undefined) { - return throwOnDepGraph(); - } - - if ( - interleavedActions?.TASK_COMPILE_SOLIDITY_GET_DEPENDENCY_GRAPH !== - undefined - ) { - await interleavedActions?.TASK_COMPILE_SOLIDITY_GET_DEPENDENCY_GRAPH(); - } - - return { - getResolvedFiles: - getResolvedFiles ?? - (() => [ - { - absolutePath: "/projects/example/contracts/first.sol", - content: { rawContent: "// SPDX-License-Identifier: GPL-3.0" }, - }, - { - absolutePath: "/projects/example/contracts/second.sol", - content: { rawContent: "// SPDX-License-Identifier: GPL-3.0" }, - }, - ]), - }; - } - - if (param === "TASK_COMPILE_SOLIDITY_GET_COMPILATION_JOB_FOR_FILE") { - if ( - interleavedActions?.TASK_COMPILE_SOLIDITY_GET_COMPILATION_JOB_FOR_FILE !== - undefined - ) { - await interleavedActions?.TASK_COMPILE_SOLIDITY_GET_COMPILATION_JOB_FOR_FILE(); - } - - return compilationJob !== undefined - ? compilationJob - : { - getResolvedFiles: getResolvedFiles ?? (() => compilationFiles), - getSolcConfig: () => ({ - version: "0.8.0", - }), - }; - } - - if (param === "TASK_COMPILE_SOLIDITY_GET_COMPILER_INPUT") { - if ( - interleavedActions?.TASK_COMPILE_SOLIDITY_GET_COMPILER_INPUT !== - undefined - ) { - await interleavedActions?.TASK_COMPILE_SOLIDITY_GET_COMPILER_INPUT(); - } - - const sources = Object.fromEntries( - compilationFiles.map(({ absolutePath, content: { rawContent } }) => [ - absolutePath.replace("/projects/example/", ""), - { content: rawContent }, - ]) - ); - - return { - language: "Solidity", - sources, - settings: { - optimizer: { enabled: false, runs: 200 }, - outputSelection: { - "*": { - "*": [ - "abi", - "evm.bytecode", - "evm.deployedBytecode", - "evm.methodIdentifiers", - "metadata", - ], - "": ["ast"], - }, - }, - }, - }; - } - - if (param === "TASK_COMPILE_SOLIDITY_GET_SOLC_BUILD") { - if ( - interleavedActions?.TASK_COMPILE_SOLIDITY_GET_SOLC_BUILD !== undefined - ) { - await interleavedActions?.TASK_COMPILE_SOLIDITY_GET_SOLC_BUILD(); - } - - return { - version: "0.8.0", - longVersion: "0.8.0", - compilerPath: - "/projects/example/node_modules/hardhat/compilers/compiler1", - isSolcJs, - }; - } - - if (param === "TASK_COMPILE_SOLIDITY_RUN_SOLCJS") { - if ( - interleavedActions?.TASK_COMPILE_SOLIDITY_RUN_SOLCJS !== undefined - ) { - await interleavedActions?.TASK_COMPILE_SOLIDITY_RUN_SOLCJS( - passedOptions - ); - } - - return { - contracts: { - "": { - Auction: null, - AuctionBase: null, - }, - }, - sources: { - "contracts/first.sol": { ast: {}, id: 0 }, - "contracts/second.sol": { ast: {}, id: 0 }, - }, - errors, - }; - } - - if (param === "TASK_COMPILE_SOLIDITY_RUN_SOLC") { - if (interleavedActions?.TASK_COMPILE_SOLIDITY_RUN_SOLC !== undefined) { - await interleavedActions?.TASK_COMPILE_SOLIDITY_RUN_SOLC( - passedOptions - ); - } - - return { - contracts: { - "": { - Auction: null, - AuctionBase: null, - }, - }, - sources: { - "contracts/first.sol": { ast: {}, id: 0 }, - "contracts/second.sol": { ast: {}, id: 0 }, - }, - errors, - }; - } - - return null; - }, - } as any; - - return mockHre; -} - -function setupPromiseAndResolve() { - let externalResolve: () => void = () => { - return; - }; - - const promise = new Promise((resolve) => { - externalResolve = resolve; - }); - - return { - promise, - resolve: externalResolve, - }; -} - -function setupPausableFunction() { - const { promise: startPromise, resolve: startResolve } = - setupPromiseAndResolve(); - - const { promise: finishPromise, resolve: finishResolve } = - setupPromiseAndResolve(); - - const startEndFunc = async () => { - startResolve(); - - return finishPromise; - }; - - return { - startPromise, - finishResolve, - function: startEndFunc, - }; -} - -async function assertCancelOnFailureOf( - step: string, - options?: { isSolcJs: boolean } -) { - const exampleValidation: ValidateCommand = { - type: "VALIDATE", - jobId: 1, - projectBasePath: "/projects/example", - uri: "/projects/example/contracts/first.sol", - documentText: - "// SPDX-License-Identifier: GPL-3.0\npragma solidity >=0.8.2 <0.9.0;", - openDocuments: [], - }; - - // Arrange - const workerState = setupWorkerState({ - errors: [], - }); - - const { - function: startEndFunc, - startPromise: getSourcesStartedPromise, - finishResolve: resolveGetSourcesFinished, - } = setupPausableFunction(); - - workerState.hre = setupMockHre({ - errors: [], - interleavedActions: { - [step]: startEndFunc, - }, - isSolcJs: options?.isSolcJs !== undefined ? options?.isSolcJs : false, - }); - - // Act - send first change - const dispatchPromise = dispatch(workerState)({ - ...exampleValidation, - jobId: 1, - uri: "/projects/example/contracts/first.sol", - }); - - // Pause - await getSourcesStartedPromise; - - if (workerState.current === null) { - return assert.fail("build is not in progress"); - } - - // Send the second change - await dispatch(workerState)({ - ...exampleValidation, - jobId: 2, - uri: "/projects/example/contracts/first.sol", - }); - - // Unpause - resolveGetSourcesFinished(); - - // Complete the first change message - await dispatchPromise; - - // Assert - const send = workerState.send as any; - - assert(send.called); - - // First call cancelled - const firstJobValidationMessage = send.args[0][0]; - assert.deepStrictEqual(firstJobValidationMessage, { - type: "VALIDATION_COMPLETE", - status: "CANCELLED", - jobId: 1, - projectBasePath: "/projects/example", - }); - - // Second call completes and validates - const secondJobValidationMessage = send.args[1][0]; - assert.deepStrictEqual(secondJobValidationMessage, { - type: "VALIDATION_COMPLETE", - status: "VALIDATION_PASS", - jobId: 2, - projectBasePath: "/projects/example", - version: "0.8.0", - sources: [ - "/projects/example/contracts/first.sol", - "/projects/example/contracts/second.sol", - ], - }); -} diff --git a/server/test/solFileDetails/getSolFileDetails.ts b/server/test/solFileDetails/getSolFileDetails.ts deleted file mode 100644 index 5b947b48..00000000 --- a/server/test/solFileDetails/getSolFileDetails.ts +++ /dev/null @@ -1,86 +0,0 @@ -import { lowercaseDriveLetter, toUnixStyle } from "@utils/index"; -import * as assert from "assert"; -import * as path from "path"; -import { forceToUnixStyle } from "../helpers/forceToUnixStyle"; -import { prependWithSlash } from "../helpers/prependWithSlash"; -import { - OnRequest, - setupMockLanguageServer, -} from "../helpers/setupMockLanguageServer"; -import { runningOnWindows } from "../../src/utils/operatingSystem"; - -describe("Solidity Language Server", () => { - describe("get sol file details", () => { - const workspaceFolder = prependWithSlash( - forceToUnixStyle(path.join(__dirname, "..")) - ); - - const projectUri = forceToUnixStyle( - path.join(__dirname, "testData", "project", "hardhat.config.ts") - ); - - const outwithUri = forceToUnixStyle( - path.join(__dirname, "testData", "outwith.sol") - ); - - const withinUri = forceToUnixStyle( - path.join(__dirname, "testData", "project", "within.sol") - ); - - let request: OnRequest; - - before(async () => { - ({ - server: { request }, - } = await setupMockLanguageServer({ - projects: { [workspaceFolder]: [projectUri] }, - documents: [ - { uri: outwithUri, analyze: true }, - { uri: withinUri, analyze: true }, - ], - errors: [], - })); - }); - - it("returns the project config file for hardhat files", async () => { - const response = await request({ - uri: prependWithFilePrefix(withinUri), - // eslint-disable-next-line @typescript-eslint/no-explicit-any - } as any); - - assert.deepStrictEqual(response, { - found: true, - hardhat: true, - configPath: lowercaseDriveLetter(toUnixStyle(projectUri)), - configDisplayPath: "solFileDetails/testData/project/hardhat.config.ts", - }); - }); - - it("returns no project config for non-hardhat files", async () => { - const response = await request({ - uri: prependWithFilePrefix(outwithUri), - // eslint-disable-next-line @typescript-eslint/no-explicit-any - } as any); - - assert.deepStrictEqual(response, { - found: true, - hardhat: false, - }); - }); - - it("returns not found for unknown files", async () => { - const response = await request({ - uri: prependWithFilePrefix("nonexistant.sol"), - // eslint-disable-next-line @typescript-eslint/no-explicit-any - } as any); - - assert.deepStrictEqual(response, { - found: false, - }); - }); - }); -}); - -function prependWithFilePrefix(filePath: string) { - return runningOnWindows() ? `file:///${filePath}` : `file://${filePath}`; -} diff --git a/server/tsconfig.json b/server/tsconfig.json index dc255b08..0d800974 100644 --- a/server/tsconfig.json +++ b/server/tsconfig.json @@ -18,7 +18,7 @@ "@services/*": ["./src/services/*"], "@utils/*": ["./src/utils/*"] } - } - // "include": ["src", "test"], + }, + "include": ["src", "test"] // "exclude": ["node_modules", ".vscode-test", ".eslintrc.js"] } diff --git a/server/yarn.lock b/server/yarn.lock index c63f5b20..4a71ed19 100644 --- a/server/yarn.lock +++ b/server/yarn.lock @@ -522,71 +522,71 @@ "@nodelib/fs.scandir" "2.1.5" fastq "^1.6.0" -"@nomicfoundation/solidity-analyzer-darwin-arm64@0.0.3": - version "0.0.3" - resolved "https://registry.yarnpkg.com/@nomicfoundation/solidity-analyzer-darwin-arm64/-/solidity-analyzer-darwin-arm64-0.0.3.tgz#1d49e4ac028831a3011a9f3dca60bd1963185342" - integrity sha512-W+bIiNiZmiy+MTYFZn3nwjyPUO6wfWJ0lnXx2zZrM8xExKObMrhCh50yy8pQING24mHfpPFCn89wEB/iG7vZDw== +"@nomicfoundation/solidity-analyzer-darwin-arm64@0.1.0": + version "0.1.0" + resolved "https://registry.yarnpkg.com/@nomicfoundation/solidity-analyzer-darwin-arm64/-/solidity-analyzer-darwin-arm64-0.1.0.tgz#83a7367342bd053a76d04bbcf4f373fef07cf760" + integrity sha512-vEF3yKuuzfMHsZecHQcnkUrqm8mnTWfJeEVFHpg+cO+le96xQA4lAJYdUan8pXZohQxv1fSReQsn4QGNuBNuCw== -"@nomicfoundation/solidity-analyzer-darwin-x64@0.0.3": - version "0.0.3" - resolved "https://registry.yarnpkg.com/@nomicfoundation/solidity-analyzer-darwin-x64/-/solidity-analyzer-darwin-x64-0.0.3.tgz#c0fccecc5506ff5466225e41e65691abafef3dbe" - integrity sha512-HuJd1K+2MgmFIYEpx46uzwEFjvzKAI765mmoMxy4K+Aqq1p+q7hHRlsFU2kx3NB8InwotkkIq3A5FLU1sI1WDw== +"@nomicfoundation/solidity-analyzer-darwin-x64@0.1.0": + version "0.1.0" + resolved "https://registry.yarnpkg.com/@nomicfoundation/solidity-analyzer-darwin-x64/-/solidity-analyzer-darwin-x64-0.1.0.tgz#1225f7da647ae1ad25a87125664704ecc0af6ccc" + integrity sha512-dlHeIg0pTL4dB1l9JDwbi/JG6dHQaU1xpDK+ugYO8eJ1kxx9Dh2isEUtA4d02cQAl22cjOHTvifAk96A+ItEHA== -"@nomicfoundation/solidity-analyzer-freebsd-x64@0.0.3": - version "0.0.3" - resolved "https://registry.yarnpkg.com/@nomicfoundation/solidity-analyzer-freebsd-x64/-/solidity-analyzer-freebsd-x64-0.0.3.tgz#8261d033f7172b347490cd005931ef8168ab4d73" - integrity sha512-2cR8JNy23jZaO/vZrsAnWCsO73asU7ylrHIe0fEsXbZYqBP9sMr+/+xP3CELDHJxUbzBY8zqGvQt1ULpyrG+Kw== +"@nomicfoundation/solidity-analyzer-freebsd-x64@0.1.0": + version "0.1.0" + resolved "https://registry.yarnpkg.com/@nomicfoundation/solidity-analyzer-freebsd-x64/-/solidity-analyzer-freebsd-x64-0.1.0.tgz#dbc052dcdfd50ae50fd5ae1788b69b4e0fa40040" + integrity sha512-WFCZYMv86WowDA4GiJKnebMQRt3kCcFqHeIomW6NMyqiKqhK1kIZCxSLDYsxqlx396kKLPN1713Q1S8tu68GKg== -"@nomicfoundation/solidity-analyzer-linux-arm64-gnu@0.0.3": - version "0.0.3" - resolved "https://registry.yarnpkg.com/@nomicfoundation/solidity-analyzer-linux-arm64-gnu/-/solidity-analyzer-linux-arm64-gnu-0.0.3.tgz#1ba64b1d76425f8953dedc6367bd7dd46f31dfc5" - integrity sha512-Eyv50EfYbFthoOb0I1568p+eqHGLwEUhYGOxcRNywtlTE9nj+c+MT1LA53HnxD9GsboH4YtOOmJOulrjG7KtbA== +"@nomicfoundation/solidity-analyzer-linux-arm64-gnu@0.1.0": + version "0.1.0" + resolved "https://registry.yarnpkg.com/@nomicfoundation/solidity-analyzer-linux-arm64-gnu/-/solidity-analyzer-linux-arm64-gnu-0.1.0.tgz#e6b2eea633995b557e74e881d2a43eab4760903d" + integrity sha512-DTw6MNQWWlCgc71Pq7CEhEqkb7fZnS7oly13pujs4cMH1sR0JzNk90Mp1zpSCsCs4oKan2ClhMlLKtNat/XRKQ== -"@nomicfoundation/solidity-analyzer-linux-arm64-musl@0.0.3": - version "0.0.3" - resolved "https://registry.yarnpkg.com/@nomicfoundation/solidity-analyzer-linux-arm64-musl/-/solidity-analyzer-linux-arm64-musl-0.0.3.tgz#8d864c49b55e683f7e3b5cce9d10b628797280ac" - integrity sha512-V8grDqI+ivNrgwEt2HFdlwqV2/EQbYAdj3hbOvjrA8Qv+nq4h9jhQUxFpegYMDtpU8URJmNNlXgtfucSrAQwtQ== +"@nomicfoundation/solidity-analyzer-linux-arm64-musl@0.1.0": + version "0.1.0" + resolved "https://registry.yarnpkg.com/@nomicfoundation/solidity-analyzer-linux-arm64-musl/-/solidity-analyzer-linux-arm64-musl-0.1.0.tgz#af81107f5afa794f19988a368647727806e18dc4" + integrity sha512-wUpUnR/3GV5Da88MhrxXh/lhb9kxh9V3Jya2NpBEhKDIRCDmtXMSqPMXHZmOR9DfCwCvG6vLFPr/+YrPCnUN0w== -"@nomicfoundation/solidity-analyzer-linux-x64-gnu@0.0.3": - version "0.0.3" - resolved "https://registry.yarnpkg.com/@nomicfoundation/solidity-analyzer-linux-x64-gnu/-/solidity-analyzer-linux-x64-gnu-0.0.3.tgz#16e769500cf1a8bb42ab9498cee3b93c30f78295" - integrity sha512-uRfVDlxtwT1vIy7MAExWAkRD4r9M79zMG7S09mCrWUn58DbLs7UFl+dZXBX0/8FTGYWHhOT/1Etw1ZpAf5DTrg== +"@nomicfoundation/solidity-analyzer-linux-x64-gnu@0.1.0": + version "0.1.0" + resolved "https://registry.yarnpkg.com/@nomicfoundation/solidity-analyzer-linux-x64-gnu/-/solidity-analyzer-linux-x64-gnu-0.1.0.tgz#6877e1da1a06a9f08446070ab6e0a5347109f868" + integrity sha512-lR0AxK1x/MeKQ/3Pt923kPvwigmGX3OxeU5qNtQ9pj9iucgk4PzhbS3ruUeSpYhUxG50jN4RkIGwUMoev5lguw== -"@nomicfoundation/solidity-analyzer-linux-x64-musl@0.0.3": - version "0.0.3" - resolved "https://registry.yarnpkg.com/@nomicfoundation/solidity-analyzer-linux-x64-musl/-/solidity-analyzer-linux-x64-musl-0.0.3.tgz#75f4e1a25526d54c506e4eba63b3d698b6255b8f" - integrity sha512-8HPwYdLbhcPpSwsE0yiU/aZkXV43vlXT2ycH+XlOjWOnLfH8C41z0njK8DHRtEFnp4OVN6E7E5lHBBKDZXCliA== +"@nomicfoundation/solidity-analyzer-linux-x64-musl@0.1.0": + version "0.1.0" + resolved "https://registry.yarnpkg.com/@nomicfoundation/solidity-analyzer-linux-x64-musl/-/solidity-analyzer-linux-x64-musl-0.1.0.tgz#bb6cd83a0c259eccef4183796b6329a66cf7ebd9" + integrity sha512-A1he/8gy/JeBD3FKvmI6WUJrGrI5uWJNr5Xb9WdV+DK0F8msuOqpEByLlnTdLkXMwW7nSl3awvLezOs9xBHJEg== -"@nomicfoundation/solidity-analyzer-win32-arm64-msvc@0.0.3": - version "0.0.3" - resolved "https://registry.yarnpkg.com/@nomicfoundation/solidity-analyzer-win32-arm64-msvc/-/solidity-analyzer-win32-arm64-msvc-0.0.3.tgz#ef6e20cfad5eedfdb145cc34a44501644cd7d015" - integrity sha512-5WWcT6ZNvfCuxjlpZOY7tdvOqT1kIQYlDF9Q42wMpZ5aTm4PvjdCmFDDmmTvyXEBJ4WTVmY5dWNWaxy8h/E28g== +"@nomicfoundation/solidity-analyzer-win32-arm64-msvc@0.1.0": + version "0.1.0" + resolved "https://registry.yarnpkg.com/@nomicfoundation/solidity-analyzer-win32-arm64-msvc/-/solidity-analyzer-win32-arm64-msvc-0.1.0.tgz#9d4bca1cc9a1333fde985675083b0b7d165f6076" + integrity sha512-7x5SXZ9R9H4SluJZZP8XPN+ju7Mx+XeUMWZw7ZAqkdhP5mK19I4vz3x0zIWygmfE8RT7uQ5xMap0/9NPsO+ykw== -"@nomicfoundation/solidity-analyzer-win32-ia32-msvc@0.0.3": - version "0.0.3" - resolved "https://registry.yarnpkg.com/@nomicfoundation/solidity-analyzer-win32-ia32-msvc/-/solidity-analyzer-win32-ia32-msvc-0.0.3.tgz#98c4e3af9cee68896220fa7e270aefdf7fc89c7b" - integrity sha512-P/LWGZwWkyjSwkzq6skvS2wRc3gabzAbk6Akqs1/Iiuggql2CqdLBkcYWL5Xfv3haynhL+2jlNkak+v2BTZI4A== +"@nomicfoundation/solidity-analyzer-win32-ia32-msvc@0.1.0": + version "0.1.0" + resolved "https://registry.yarnpkg.com/@nomicfoundation/solidity-analyzer-win32-ia32-msvc/-/solidity-analyzer-win32-ia32-msvc-0.1.0.tgz#0db5bfc6aa952bea4098d8d2c8947b4e5c4337ee" + integrity sha512-m7w3xf+hnE774YRXu+2mGV7RiF3QJtUoiYU61FascCkQhX3QMQavh7saH/vzb2jN5D24nT/jwvaHYX/MAM9zUw== -"@nomicfoundation/solidity-analyzer-win32-x64-msvc@0.0.3": - version "0.0.3" - resolved "https://registry.yarnpkg.com/@nomicfoundation/solidity-analyzer-win32-x64-msvc/-/solidity-analyzer-win32-x64-msvc-0.0.3.tgz#12da288e7ef17ec14848f19c1e8561fed20d231d" - integrity sha512-4AcTtLZG1s/S5mYAIr/sdzywdNwJpOcdStGF3QMBzEt+cGn3MchMaS9b1gyhb2KKM2c39SmPF5fUuWq1oBSQZQ== +"@nomicfoundation/solidity-analyzer-win32-x64-msvc@0.1.0": + version "0.1.0" + resolved "https://registry.yarnpkg.com/@nomicfoundation/solidity-analyzer-win32-x64-msvc/-/solidity-analyzer-win32-x64-msvc-0.1.0.tgz#2e0f39a2924dcd77db6b419828595e984fabcb33" + integrity sha512-xCuybjY0sLJQnJhupiFAXaek2EqF0AP0eBjgzaalPXSNvCEN6ZYHvUzdA50ENDVeSYFXcUsYf3+FsD3XKaeptA== -"@nomicfoundation/solidity-analyzer@0.0.3": - version "0.0.3" - resolved "https://registry.yarnpkg.com/@nomicfoundation/solidity-analyzer/-/solidity-analyzer-0.0.3.tgz#d1029f872e66cb1082503b02cc8b0be12f8dd95e" - integrity sha512-VFMiOQvsw7nx5bFmrmVp2Q9rhIjw2AFST4DYvWVVO9PMHPE23BY2+kyfrQ4J3xCMFC8fcBbGLt7l4q7m1SlTqg== +"@nomicfoundation/solidity-analyzer@0.1.0": + version "0.1.0" + resolved "https://registry.yarnpkg.com/@nomicfoundation/solidity-analyzer/-/solidity-analyzer-0.1.0.tgz#e5ddc43ad5c0aab96e5054520d8e16212e125f50" + integrity sha512-xGWAiVCGOycvGiP/qrlf9f9eOn7fpNbyJygcB0P21a1MDuVPlKt0Srp7rvtBEutYQ48ouYnRXm33zlRnlTOPHg== optionalDependencies: - "@nomicfoundation/solidity-analyzer-darwin-arm64" "0.0.3" - "@nomicfoundation/solidity-analyzer-darwin-x64" "0.0.3" - "@nomicfoundation/solidity-analyzer-freebsd-x64" "0.0.3" - "@nomicfoundation/solidity-analyzer-linux-arm64-gnu" "0.0.3" - "@nomicfoundation/solidity-analyzer-linux-arm64-musl" "0.0.3" - "@nomicfoundation/solidity-analyzer-linux-x64-gnu" "0.0.3" - "@nomicfoundation/solidity-analyzer-linux-x64-musl" "0.0.3" - "@nomicfoundation/solidity-analyzer-win32-arm64-msvc" "0.0.3" - "@nomicfoundation/solidity-analyzer-win32-ia32-msvc" "0.0.3" - "@nomicfoundation/solidity-analyzer-win32-x64-msvc" "0.0.3" + "@nomicfoundation/solidity-analyzer-darwin-arm64" "0.1.0" + "@nomicfoundation/solidity-analyzer-darwin-x64" "0.1.0" + "@nomicfoundation/solidity-analyzer-freebsd-x64" "0.1.0" + "@nomicfoundation/solidity-analyzer-linux-arm64-gnu" "0.1.0" + "@nomicfoundation/solidity-analyzer-linux-arm64-musl" "0.1.0" + "@nomicfoundation/solidity-analyzer-linux-x64-gnu" "0.1.0" + "@nomicfoundation/solidity-analyzer-linux-x64-musl" "0.1.0" + "@nomicfoundation/solidity-analyzer-win32-arm64-msvc" "0.1.0" + "@nomicfoundation/solidity-analyzer-win32-ia32-msvc" "0.1.0" + "@nomicfoundation/solidity-analyzer-win32-x64-msvc" "0.1.0" "@sentry/core@5.30.0": version "5.30.0" @@ -770,10 +770,10 @@ dependencies: antlr4ts "^0.5.0-alpha.4" -"@solidity-parser/parser@^0.14.3": - version "0.14.3" - resolved "https://registry.yarnpkg.com/@solidity-parser/parser/-/parser-0.14.3.tgz#0d627427b35a40d8521aaa933cc3df7d07bfa36f" - integrity sha512-29g2SZ29HtsqA58pLCtopI1P/cPy5/UAzlcAXO6T/CNJimG6yA8kx4NaseMyJULiC+TEs02Y9/yeHzClqoA0hw== +"@solidity-parser/parser@^0.14.5": + version "0.14.5" + resolved "https://registry.yarnpkg.com/@solidity-parser/parser/-/parser-0.14.5.tgz#87bc3cc7b068e08195c219c91cd8ddff5ef1a804" + integrity sha512-6dKnHZn7fg/iQATVEzqyUOyEidbn05q7YA2mQ9hC0MMXhhV3/JrsxmFSYZAcr7j1yUP700LLhTruvJ3MiQmjJg== dependencies: antlr4ts "^0.5.0-alpha.4" @@ -881,6 +881,11 @@ "@types/level-errors" "*" "@types/node" "*" +"@types/lodash@^4.14.185": + version "4.14.185" + resolved "https://registry.yarnpkg.com/@types/lodash/-/lodash-4.14.185.tgz#c9843f5a40703a8f5edfd53358a58ae729816908" + integrity sha512-evMDG1bC4rgQg4ku9tKpuMh5iBNEwNa3tf9zRHdP1qlv+1WUg44xat4IxCE14gIpZRGUUWAx2VhItCZc25NfMA== + "@types/lru-cache@^5.1.0": version "5.1.1" resolved "https://registry.yarnpkg.com/@types/lru-cache/-/lru-cache-5.1.1.tgz#c48c2e27b65d2a153b19bfc1a317e30872e01eef" @@ -932,6 +937,11 @@ dependencies: "@types/node" "*" +"@types/semver@^7.3.12": + version "7.3.12" + resolved "https://registry.yarnpkg.com/@types/semver/-/semver-7.3.12.tgz#920447fdd78d76b19de0438b7f60df3c4a80bf1c" + integrity sha512-WwA1MW0++RfXmCr12xeYOOC5baSC9mSb0ZqCquFzKhcoF4TvHu5MKOuXsncgZcpVFhB1pXd5hZmM0ryAoCp12A== + "@types/sinon@10.0.6": version "10.0.6" resolved "https://registry.yarnpkg.com/@types/sinon/-/sinon-10.0.6.tgz#bc3faff5154e6ecb69b797d311b7cf0c1b523a1d" @@ -1716,10 +1726,10 @@ elliptic@6.5.4, elliptic@^6.5.2, elliptic@^6.5.4: minimalistic-assert "^1.0.1" minimalistic-crypto-utils "^1.0.1" -emoji-regex@^10.1.0: - version "10.1.0" - resolved "https://registry.yarnpkg.com/emoji-regex/-/emoji-regex-10.1.0.tgz#d50e383743c0f7a5945c47087295afc112e3cf66" - integrity sha512-xAEnNCT3w2Tg6MA7ly6QqYJvEoY1tm9iIjJ3yMKK9JPlWuRHAMoe5iETwQnx3M9TVbFMfsrBgWKR+IsmswwNjg== +emoji-regex@^10.2.1: + version "10.2.1" + resolved "https://registry.yarnpkg.com/emoji-regex/-/emoji-regex-10.2.1.tgz#a41c330d957191efd3d9dfe6e1e8e1e9ab048b3f" + integrity sha512-97g6QgOk8zlDRdgq1WxwgTMgEWGVAQvB5Fdpgc1MkNy56la5SKP9GsMXKDOdqwn90/41a8yPwIGk1Y6WVbeMQA== emoji-regex@^7.0.1: version "7.0.3" @@ -3080,7 +3090,7 @@ lodash.truncate@^4.4.2: resolved "https://registry.yarnpkg.com/lodash.truncate/-/lodash.truncate-4.4.2.tgz#5a350da0b1113b837ecfffd5812cbe58d6eae193" integrity sha1-WjUNoLERO4N+z//VgSy+WNbq4ZM= -lodash@^4.17.11, lodash@^4.17.14, lodash@^4.17.15: +lodash@^4.17.11, lodash@^4.17.14, lodash@^4.17.15, lodash@^4.17.21: version "4.17.21" resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.21.tgz#679591c564c3bffaae8454cf0b3df370c3d6911c" integrity sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg== @@ -3671,15 +3681,15 @@ prelude-ls@^1.2.1: resolved "https://registry.yarnpkg.com/prelude-ls/-/prelude-ls-1.2.1.tgz#debc6489d7a6e6b0e7611888cec880337d316396" integrity sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g== -prettier-plugin-solidity@1.0.0-beta.24: - version "1.0.0-beta.24" - resolved "https://registry.yarnpkg.com/prettier-plugin-solidity/-/prettier-plugin-solidity-1.0.0-beta.24.tgz#67573ca87098c14f7ccff3639ddd8a4cab2a87eb" - integrity sha512-6JlV5BBTWzmDSq4kZ9PTXc3eLOX7DF5HpbqmmaF+kloyUwOZbJ12hIYsUaZh2fVgZdV2t0vWcvY6qhILhlzgqg== +prettier-plugin-solidity@1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/prettier-plugin-solidity/-/prettier-plugin-solidity-1.0.0.tgz#5b23f48cc9c28a1246c6dd89af117234b813f48b" + integrity sha512-gRJCeZ7imbWtNYN2SudjJoPmka5r6jcd2cSTV6FC3pVCtY6LFZbeQQjpKufUEp88hXBAAnkOTOh7TA5xwj9M3A== dependencies: - "@solidity-parser/parser" "^0.14.3" - emoji-regex "^10.1.0" + "@solidity-parser/parser" "^0.14.5" + emoji-regex "^10.2.1" escape-string-regexp "^4.0.0" - semver "^7.3.7" + semver "^7.3.8" solidity-comments-extractor "^0.0.7" string-width "^4.2.3" @@ -3945,6 +3955,13 @@ semver@^7.3.7: dependencies: lru-cache "^6.0.0" +semver@^7.3.8: + version "7.3.8" + resolved "https://registry.yarnpkg.com/semver/-/semver-7.3.8.tgz#07a78feafb3f7b32347d725e33de7e2a2df67798" + integrity sha512-NB1ctGL5rlHrPJtFDVIVzTyQylMLu9N9VICA6HSFJo8MCGVTMW6gfpicwKmmK/dAjTOrqu5l63JJOpDSrAis3A== + dependencies: + lru-cache "^6.0.0" + serialize-error@8.1.0: version "8.1.0" resolved "https://registry.yarnpkg.com/serialize-error/-/serialize-error-8.1.0.tgz#3a069970c712f78634942ddd50fbbc0eaebe2f67" diff --git a/test/integration/helpers/assertions.ts b/test/integration/helpers/assertions.ts index 295a500d..70744d0a 100644 --- a/test/integration/helpers/assertions.ts +++ b/test/integration/helpers/assertions.ts @@ -2,6 +2,8 @@ "use strict"; import * as vscode from "vscode"; import * as assert from "assert"; +import { getCurrentEditor } from "./editor"; +import { sleep } from "./sleep"; export function rangeEqual( range: vscode.Range, @@ -115,3 +117,19 @@ export async function checkOrWaitDiagnostic( vscode.languages.onDidChangeDiagnostics(checkDiagnostics); }); } + +const TIMEOUT = 10000; +export async function assertCurrentTabFile(expectedUri: string) { + const start = new Date().getTime(); + let currentUri = ""; + + while (new Date().getTime() - start < TIMEOUT) { + currentUri = getCurrentEditor().document.fileName; + if (currentUri === expectedUri) return; + await sleep(100); + } + + throw new Error( + `Waited ${TIMEOUT} ms for current tab to be ${expectedUri} but it was ${currentUri}` + ); +} diff --git a/test/integration/index.ts b/test/integration/index.ts index b1f10e4e..5889b60f 100644 --- a/test/integration/index.ts +++ b/test/integration/index.ts @@ -3,6 +3,7 @@ import path from "path"; import Mocha from "mocha"; // eslint-disable-next-line import/no-extraneous-dependencies import glob from "glob"; +import vscode from "vscode"; import { sleep } from "./helpers/sleep"; export function run(): Promise { @@ -12,7 +13,15 @@ export function run(): Promise { color: true, rootHooks: { beforeAll: async () => { - await sleep(5000); // Wait for the extension to be loaded + // Wait until extension is fully initialized (index + analysis + validation ready) + while ( + vscode.extensions + .getExtension("nomicfoundation.hardhat-solidity") + ?.exports.isReady() !== true + ) { + await sleep(100); + } + // await sleep(5000); // Wait for the extension to be loaded }, }, timeout: 30000, diff --git a/server/test/solFileDetails/testData/outwith.sol b/test/integration/projects/main/contracts/completion/Empty.sol similarity index 100% rename from server/test/solFileDetails/testData/outwith.sol rename to test/integration/projects/main/contracts/completion/Empty.sol diff --git a/test/integration/projects/main/contracts/definition/Circular1.sol b/test/integration/projects/main/contracts/definition/Circular1.sol new file mode 100644 index 00000000..318cf040 --- /dev/null +++ b/test/integration/projects/main/contracts/definition/Circular1.sol @@ -0,0 +1,8 @@ +// SPDX-License-Identifier: GPL-3.0 +pragma solidity ^0.8.4; + +import "./Circular2.sol"; + +contract Circular1 { + Circular2 c2; +} diff --git a/test/integration/projects/main/contracts/definition/Circular2.sol b/test/integration/projects/main/contracts/definition/Circular2.sol new file mode 100644 index 00000000..26f9f730 --- /dev/null +++ b/test/integration/projects/main/contracts/definition/Circular2.sol @@ -0,0 +1,8 @@ +// SPDX-License-Identifier: GPL-3.0 +pragma solidity ^0.8.4; + +import "./Circular1.sol"; + +contract Circular2 { + Circular1 c1; +} diff --git a/test/integration/projects/projectless/lib/Quz.sol b/test/integration/projects/projectless/lib/Quz.sol new file mode 100644 index 00000000..11c4af46 --- /dev/null +++ b/test/integration/projects/projectless/lib/Quz.sol @@ -0,0 +1,7 @@ +// SPDX-License-Identifier: GPL-3.0 + +pragma solidity ^0.8.7; + +contract Quz { + constructor(uint asd) {} +} diff --git a/test/integration/projects/projectless/src/Bar.sol b/test/integration/projects/projectless/src/Bar.sol new file mode 100644 index 00000000..e1e7d9bb --- /dev/null +++ b/test/integration/projects/projectless/src/Bar.sol @@ -0,0 +1,11 @@ +// SPDX-License-Identifier: GPL-3.0 + +pragma solidity ^0.8.7; + +import "./Baz.sol"; + +contract Bar { + function magic() public pure returns (uint) { + return 12345; + } +} diff --git a/test/integration/projects/projectless/src/Baz.sol b/test/integration/projects/projectless/src/Baz.sol new file mode 100644 index 00000000..51c0be93 --- /dev/null +++ b/test/integration/projects/projectless/src/Baz.sol @@ -0,0 +1,5 @@ +// SPDX-License-Identifier: GPL-3.0 + +pragma solidity >=0.8.0 <0.8.16; + +contract Baz {} diff --git a/test/integration/projects/projectless/src/CompilerError.sol b/test/integration/projects/projectless/src/CompilerError.sol new file mode 100644 index 00000000..02a9454e --- /dev/null +++ b/test/integration/projects/projectless/src/CompilerError.sol @@ -0,0 +1,7 @@ +// SPDX-License-Identifier: GPL-3.0 + +pragma solidity ^0.8.7; + +contract CompilerError { + uint256 foo = "abc"; +} diff --git a/test/integration/projects/projectless/src/Foo.sol b/test/integration/projects/projectless/src/Foo.sol new file mode 100644 index 00000000..c3d22338 --- /dev/null +++ b/test/integration/projects/projectless/src/Foo.sol @@ -0,0 +1,19 @@ +// SPDX-License-Identifier: GPL-3.0 + +pragma solidity ^0.8.7; + +import "./Bar.sol"; +import "./Baz.sol"; +import "../lib/Quz.sol"; + +contract Foo { + Bar bar = new Bar(); + + Quz quz = new Quz(123242); + + constructor(uint256 baz) {} + + function foo() public returns (uint256) { + return bar.magic(); + } +} diff --git a/test/integration/projects/projectless/src/ImportNonexistent.sol b/test/integration/projects/projectless/src/ImportNonexistent.sol new file mode 100644 index 00000000..bc14dd6d --- /dev/null +++ b/test/integration/projects/projectless/src/ImportNonexistent.sol @@ -0,0 +1,7 @@ +// SPDX-License-Identifier: GPL-3.0 + +pragma solidity ^0.8.7; + +import "./NonExistent.sol"; + +contract Foo {} diff --git a/server/test/solFileDetails/testData/project/within.sol b/test/integration/projects/remappings/foundry.toml similarity index 100% rename from server/test/solFileDetails/testData/project/within.sol rename to test/integration/projects/remappings/foundry.toml diff --git a/test/integration/projects/main/contracts/completion/AddSemicolon.sol b/test/integration/projects/remappings/src/Empty.sol similarity index 100% rename from test/integration/projects/main/contracts/completion/AddSemicolon.sol rename to test/integration/projects/remappings/src/Empty.sol diff --git a/test/integration/tests/completion/completion.test.ts b/test/integration/tests/completion/completion.test.ts index 2721076a..ea454f75 100644 --- a/test/integration/tests/completion/completion.test.ts +++ b/test/integration/tests/completion/completion.test.ts @@ -1,15 +1,14 @@ import * as assert from "assert"; import vscode from "vscode"; +import os from "os"; import { openFileInEditor, waitForUI } from "../../helpers/editor"; import { type } from "../../helpers/commands"; import { sleep } from "../../helpers/sleep"; import { getTestContractUri } from "../../helpers/getTestContract"; suite("completion", function () { - test("[completion] - add semicolon automatically after import", async () => { - const uri = getTestContractUri( - "main/contracts/completion/AddSemicolon.sol" - ); + test("[completion] - hardhat node_modules contract import completion on empty", async () => { + const uri = getTestContractUri("main/contracts/completion/Empty.sol"); const editor = await openFileInEditor(uri); const document = editor.document; @@ -18,15 +17,71 @@ suite("completion", function () { await vscode.commands.executeCommand("acceptSelectedSuggestion"); await waitForUI(); assert.equal(document.getText(), "import '@openzeppelin';"); + }); - await type(document, "/"); - await sleep(1000); + test("[completion] - hardhat node_modules contract import completion on partial specification", async () => { + const uri = getTestContractUri("main/contracts/completion/Empty.sol"); + const editor = await openFileInEditor(uri); + const document = editor.document; + + await type(document, "import '@openzep"); + await sleep(2000); await vscode.commands.executeCommand("acceptSelectedSuggestion"); + await waitForUI(); + assert.equal(document.getText(), "import '@openzeppelin';"); + }); + test("[completion] - hardhat node_modules contract import completion on module specified", async () => { + const uri = getTestContractUri("main/contracts/completion/Empty.sol"); + const editor = await openFileInEditor(uri); + const document = editor.document; + + await type(document, "import '@openzeppelin/"); + await sleep(1000); + await vscode.commands.executeCommand("acceptSelectedSuggestion"); await waitForUI(); assert.equal( document.getText(), "import '@openzeppelin/contracts/access/AccessControl.sol';" ); }); + + test("[completion] - hardhat node_modules contract import completion on module and partial contract", async () => { + const uri = getTestContractUri("main/contracts/completion/Empty.sol"); + const editor = await openFileInEditor(uri); + const document = editor.document; + + await type(document, "import '@openzeppelin/erc7"); + await sleep(1000); + await vscode.commands.executeCommand("acceptSelectedSuggestion"); + await waitForUI(); + assert.equal( + document.getText(), + "import '@openzeppelin/contracts/token/ERC721/ERC721.sol';" + ); + }); + + test("[completion] - foundry import completions through remappings", async () => { + // Not running this on windows until we figure out foundry setup on the CI + if (os.platform() === "win32") { + return; + } + + const uri = getTestContractUri("remappings/src/Empty.sol"); + const editor = await openFileInEditor(uri); + const document = editor.document; + + await type(document, "import '@"); + await sleep(2000); + await vscode.commands.executeCommand("acceptSelectedSuggestion"); + await waitForUI(); + assert.equal(document.getText(), "import '@lib';"); + + await type(document, "/"); + await sleep(1000); + await vscode.commands.executeCommand("acceptSelectedSuggestion"); + + await waitForUI(); + assert.equal(document.getText(), "import '@lib/myLib/Imported.sol';"); + }); }); diff --git a/test/integration/tests/definition/definition.test.ts b/test/integration/tests/definition/definition.test.ts index a1cfae76..e438f56a 100644 --- a/test/integration/tests/definition/definition.test.ts +++ b/test/integration/tests/definition/definition.test.ts @@ -1,10 +1,14 @@ -import path from "path"; -import { Uri } from "vscode"; -import { getClient } from "../../client"; -import { Client } from "../../common/types"; -import { assertLspCommand } from "../../common/assertLspCommand"; +import vscode from "vscode"; import { getTestContractUri } from "../../helpers/getTestContract"; -import { getRootPath } from "../../helpers/workspace"; +import { + getCurrentEditor, + goToPosition, + openFileInEditor, +} from "../../helpers/editor"; +import { + assertCurrentTabFile, + assertPositionEqual, +} from "../../helpers/assertions"; suite("Single-file Navigation", function () { const testUri = getTestContractUri("main/contracts/definition/Test.sol"); @@ -12,194 +16,126 @@ suite("Single-file Navigation", function () { "main/contracts/definition/ImportTest.sol" ); - let client!: Client; - - suiteSetup(async () => { - client = await getClient(); - }); + const circular1Uri = getTestContractUri( + "main/contracts/definition/Circular1.sol" + ); + const circular2Uri = getTestContractUri( + "main/contracts/definition/Circular2.sol" + ); test("[Single-file] - Go to Definition", async () => { - await assertLspCommand(client, { - action: "DefinitionRequest", - uri: testUri.path, - params: { - position: { - line: 14, - character: 25, - }, - }, - expected: [ - { - uri: { - path: getTestContractUri("main/contracts/definition/Test.sol").path, - }, - range: [ - { - line: 9, - character: 11, - }, - { - line: 9, - character: 16, - }, - ], - }, - ], - }); + await openFileInEditor(testUri); + + goToPosition(new vscode.Position(14, 25)); + + await vscode.commands.executeCommand("editor.action.goToDeclaration"); + + await assertCurrentTabFile(testUri.fsPath); + assertPositionEqual( + getCurrentEditor().selection.active, + new vscode.Position(9, 11) + ); }); test("[Single-file][Defined after usage] - Go to Definition", async () => { - await assertLspCommand(client, { - action: "DefinitionRequest", - uri: testUri.path, - params: { - position: { - line: 15, - character: 9, - }, - }, - expected: [ - { - uri: { - path: getTestContractUri("main/contracts/definition/Test.sol").path, - }, - range: [ - { - line: 53, - character: 11, - }, - { - line: 53, - character: 19, - }, - ], - }, - ], - }); + await openFileInEditor(testUri); + + goToPosition(new vscode.Position(15, 9)); + + await vscode.commands.executeCommand("editor.action.goToDeclaration"); + + await assertCurrentTabFile(testUri.fsPath); + assertPositionEqual( + getCurrentEditor().selection.active, + new vscode.Position(53, 11) + ); }); test("[Single-file][MemberAccess] - Go to Definition", async () => { - await assertLspCommand(client, { - action: "DefinitionRequest", - uri: testUri.path, - params: { - position: { - line: 26, - character: 25, - }, - }, - expected: [ - { - uri: { - path: getTestContractUri("main/contracts/definition/Test.sol").path, - }, - range: [ - { - line: 10, - character: 13, - }, - { - line: 10, - character: 18, - }, - ], - }, - ], - }); + // vscode.extensions.getExtension("nomicfoundation.hardhat-solidity"); + + await openFileInEditor(testUri); + + goToPosition(new vscode.Position(26, 25)); + + await vscode.commands.executeCommand("editor.action.goToDeclaration"); + + await assertCurrentTabFile(testUri.fsPath); + assertPositionEqual( + getCurrentEditor().selection.active, + new vscode.Position(10, 13) + ); }); test("[Single-file][MemberAccess][Defined after usage] - Go to Definition", async () => { - await assertLspCommand(client, { - action: "DefinitionRequest", - uri: testUri.path, - params: { - position: { - line: 50, - character: 50, - }, - }, - expected: [ - { - uri: { - path: getTestContractUri("main/contracts/definition/Test.sol").path, - }, - range: [ - { - line: 54, - character: 16, - }, - { - line: 54, - character: 20, - }, - ], - }, - ], - }); + await openFileInEditor(testUri); + + goToPosition(new vscode.Position(50, 50)); + + await vscode.commands.executeCommand("editor.action.goToDeclaration"); + + await assertCurrentTabFile(testUri.fsPath); + assertPositionEqual( + getCurrentEditor().selection.active, + new vscode.Position(54, 16) + ); }); test("Jump to import file", async () => { - await assertLspCommand(client, { - action: "DefinitionRequest", - uri: importTestUri.path, - params: { - position: { - line: 3, - character: 25, - }, - }, - expected: [ - { - uri: { - path: getTestContractUri("main/contracts/definition/Foo.sol").path, - }, - range: [ - { - line: 1, - character: 0, - }, - { - line: 6, - character: 0, - }, - ], - }, - ], - }); + await openFileInEditor(importTestUri); + + goToPosition(new vscode.Position(3, 25)); + + await vscode.commands.executeCommand("editor.action.goToDeclaration"); + + await assertCurrentTabFile( + getTestContractUri("main/contracts/definition/Foo.sol").fsPath + ); + assertPositionEqual( + getCurrentEditor().selection.active, + new vscode.Position(1, 0) + ); }); test("Jump to import dependency file", async () => { - await assertLspCommand(client, { - action: "DefinitionRequest", - uri: importTestUri.path, - params: { - position: { - line: 4, - character: 73, - }, - }, - expected: [ - { - uri: { - path: Uri.file( - path.join( - getRootPath(), - "node_modules/@openzeppelin/contracts/access/Ownable.sol" - ) - ).path, - }, - range: [ - { - line: 3, - character: 0, - }, - { - line: 76, - character: 0, - }, - ], - }, - ], - }); + await openFileInEditor(importTestUri); + + goToPosition(new vscode.Position(4, 73)); + + await vscode.commands.executeCommand("editor.action.goToDeclaration"); + + await assertCurrentTabFile( + getTestContractUri( + "../node_modules/@openzeppelin/contracts/access/Ownable.sol" + ).fsPath + ); + assertPositionEqual( + getCurrentEditor().selection.active, + new vscode.Position(3, 0) + ); + }); + + test("Circular dependencies navigation", async () => { + await openFileInEditor(circular1Uri); + + goToPosition(new vscode.Position(6, 6)); + + await vscode.commands.executeCommand("editor.action.goToDeclaration"); + + await assertCurrentTabFile(circular2Uri.fsPath); + assertPositionEqual( + getCurrentEditor().selection.active, + new vscode.Position(5, 9) + ); + + goToPosition(new vscode.Position(3, 14)); + + await vscode.commands.executeCommand("editor.action.goToDeclaration"); + + await assertCurrentTabFile(circular1Uri.fsPath); + assertPositionEqual( + getCurrentEditor().selection.active, + new vscode.Position(1, 0) + ); }); }); diff --git a/test/integration/tests/projectless/projectless.test.ts b/test/integration/tests/projectless/projectless.test.ts new file mode 100644 index 00000000..23034bdd --- /dev/null +++ b/test/integration/tests/projectless/projectless.test.ts @@ -0,0 +1,93 @@ +import * as vscode from "vscode"; +import { getTestContractUri } from "../../helpers/getTestContract"; +import { + getCurrentEditor, + goToPosition, + openFileInEditor, +} from "../../helpers/editor"; +import { + assertCurrentTabFile, + assertPositionEqual, + checkOrWaitDiagnostic, +} from "../../helpers/assertions"; +import { sleep } from "../../helpers/sleep"; + +suite("projectless", function () { + this.beforeEach(async () => { + await sleep(1000); + }); + + test("[navigation] jump to definition", async () => { + const importerUri = getTestContractUri("projectless/src/Foo.sol"); + const importedUri = getTestContractUri("projectless/lib/Quz.sol"); + + // Go to Quz.sol from usage line + const importerEditor = await openFileInEditor(importerUri); + + goToPosition(new vscode.Position(6, 16)); + + await vscode.commands.executeCommand("editor.action.goToDeclaration"); + + await assertCurrentTabFile(importedUri.fsPath); + assertPositionEqual( + getCurrentEditor().selection.active, + new vscode.Position(2, 0) + ); + + // Go to Quz.sol from usage line + await vscode.window.showTextDocument(importerEditor.document); + + goToPosition(new vscode.Position(11, 17)); + + await vscode.commands.executeCommand("editor.action.goToDeclaration"); + + await assertCurrentTabFile(importedUri.fsPath); + + assertPositionEqual( + getCurrentEditor().selection.active, + new vscode.Position(4, 9) + ); + }); + + test("[validation] compiler warning", async () => { + const uri = getTestContractUri("projectless/src/Foo.sol"); + + await openFileInEditor(uri); + + await checkOrWaitDiagnostic( + uri, + new vscode.Range(15, 11, 15, 14), + vscode.DiagnosticSeverity.Warning, + "solidity", + "Function state mutability can be restricted to view" + ); + }); + + test("[validation] compiler error", async () => { + const uri = getTestContractUri("projectless/src/CompilerError.sol"); + + await openFileInEditor(uri); + + await checkOrWaitDiagnostic( + uri, + new vscode.Range(5, 16, 5, 21), + vscode.DiagnosticSeverity.Error, + "solidity", + "not implicitly convertible to expected type uint256" + ); + }); + + test("[validation] non existent import", async () => { + const uri = getTestContractUri("projectless/src/ImportNonexistent.sol"); + + await openFileInEditor(uri); + + await checkOrWaitDiagnostic( + uri, + new vscode.Range(4, 0, 4, 27), + vscode.DiagnosticSeverity.Error, + "solidity", + "File not found" + ); + }); +}); diff --git a/test/integration/tests/remappings/remappings.test.ts b/test/integration/tests/remappings/remappings.test.ts index 4aa6e7a9..37209d89 100644 --- a/test/integration/tests/remappings/remappings.test.ts +++ b/test/integration/tests/remappings/remappings.test.ts @@ -1,15 +1,23 @@ import * as vscode from "vscode"; -import assert from "assert"; +import os from "os"; import { getTestContractUri } from "../../helpers/getTestContract"; import { getCurrentEditor, goToPosition, openFileInEditor, } from "../../helpers/editor"; -import { assertPositionEqual } from "../../helpers/assertions"; +import { + assertCurrentTabFile, + assertPositionEqual, +} from "../../helpers/assertions"; suite("remappings", function () { test("[remappings] multiple navigations", async () => { + // Not running this on windows until we figure out foundry setup on the CI + if (os.platform() === "win32") { + return; + } + const importerUri = getTestContractUri("remappings/src/Importer.sol"); const importedUri = getTestContractUri("remappings/lib/myLib/Imported.sol"); const otherImportedUri = getTestContractUri( @@ -23,7 +31,7 @@ suite("remappings", function () { await vscode.commands.executeCommand("editor.action.goToDeclaration"); - assert.equal(getCurrentEditor().document.fileName, importedUri.fsPath); + await assertCurrentTabFile(importedUri.fsPath); assertPositionEqual( getCurrentEditor().selection.active, new vscode.Position(2, 0) @@ -36,7 +44,7 @@ suite("remappings", function () { await vscode.commands.executeCommand("editor.action.goToDeclaration"); - assert.equal(getCurrentEditor().document.fileName, otherImportedUri.fsPath); + await assertCurrentTabFile(otherImportedUri.fsPath); assertPositionEqual( getCurrentEditor().selection.active, new vscode.Position(2, 0) @@ -49,7 +57,7 @@ suite("remappings", function () { await vscode.commands.executeCommand("editor.action.goToDeclaration"); - assert.equal(getCurrentEditor().document.fileName, importedUri.fsPath); + await assertCurrentTabFile(importedUri.fsPath); assertPositionEqual( getCurrentEditor().selection.active, new vscode.Position(4, 9) @@ -62,7 +70,7 @@ suite("remappings", function () { await vscode.commands.executeCommand("editor.action.goToDeclaration"); - assert.equal(getCurrentEditor().document.fileName, otherImportedUri.fsPath); + await assertCurrentTabFile(otherImportedUri.fsPath); assertPositionEqual( getCurrentEditor().selection.active, new vscode.Position(4, 9)