feat(vscode): Add linter plugin to vscode extension (oxc-project#813)
u9g authored Aug 30, 2023
1 parent 66e883a commit a38619b
Showing 9 changed files with 315 additions and 229 deletions.
1 change: 1 addition & 0 deletions Cargo.lock

6 changes: 3 additions & 3 deletions crates/oxc_linter_plugin/src/lib.rs
@@ -1,10 +1,10 @@
#[cfg(test)]
mod errors;
#[cfg(test)]
mod plugin;
#[cfg(test)]
mod raw_diagnostic;
#[cfg(test)]
mod spans;
#[cfg(test)]
mod test;
mod util;

pub use {plugin::LinterPlugin, util::make_relative_path_parts};
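
With the test-only modules gated, lib.rs re-exports just `LinterPlugin` and `make_relative_path_parts`. As a hedged sketch (not part of this diff), a consumer such as the linter integration behind the new VSCode support might load the rules like this, using the simplified one-argument `LinterPlugin::new` introduced in the plugin.rs diff below; the `examples/queries` directory is a hypothetical path.

```rust
// Minimal sketch (assumed usage, not from this commit): load all .yml rules
// from a directory via the crate's re-exported LinterPlugin type.
use std::path::PathBuf;

use oxc_linter_plugin::LinterPlugin;

fn load_rules() -> oxc_diagnostics::Result<LinterPlugin> {
    // Hypothetical rules directory; LinterPlugin::new walks it recursively
    // looking for .yml query files.
    let queries_path = PathBuf::from("examples/queries");
    LinterPlugin::new(&queries_path)
}
```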
225 changes: 31 additions & 194 deletions crates/oxc_linter_plugin/src/plugin.rs
@@ -1,28 +1,16 @@
use std::{collections::BTreeMap, fmt::Debug, fs, path::PathBuf, rc::Rc, sync::Arc};

use crate::{
errors::{ErrorFromLinterPlugin, SpanStartOrEnd},
raw_diagnostic::RawPluginDiagnostic,
};
use ignore::Walk;
use miette::{NamedSource, SourceSpan};
use oxc_allocator::Allocator;
use oxc_diagnostics::{
miette::{self},
Report,
};
use oxc_diagnostics::miette::{self};
use oxc_linter::LintContext;
use oxc_parser::Parser;
use oxc_query::{schema, Adapter};
use oxc_semantic::{SemanticBuilder, SemanticBuilderReturn};
use oxc_span::SourceType;
use serde::Deserialize;
use trustfall::{execute_query, FieldValue, Schema, TransparentValue};

use crate::{
errors::{
ErrorFromLinterPlugin, ExpectedTestToFailButPassed, ExpectedTestToPassButFailed,
SpanStartOrEnd, UnexpectedErrorsInFailTest,
},
raw_diagnostic::RawPluginDiagnostic,
spans::{span_of_test_n, PassOrFail},
};
use trustfall::{execute_query, FieldValue, TransparentValue};

/// Represents a single parsed yaml plugin file. Includes
/// the query, tests, and metadata about the query.
@@ -56,26 +44,15 @@ pub struct SingleTest
/// Holds multiple parsed rules.
#[derive(Debug)]
pub struct LinterPlugin {
rules: Vec<InputQuery>,
schema: &'static Schema,
}

/// Whether to run all rules or only a specific rule.
#[allow(dead_code)]
pub enum RulesToRun {
/// Execute all rules.
All,
/// The rules to run will be the rules whose names are equal
/// to the string stored in the variant.
Only(String),
pub(crate) rules: Vec<InputQuery>,
}

impl LinterPlugin {
/// Parses all queries in the directory provided, going down into nested directories looking for .yml files.
///
/// # Errors
/// This function will error if it can't read a file, or if it can't parse a query
pub fn new(schema: &'static Schema, queries_path: &PathBuf) -> oxc_diagnostics::Result<Self> {
pub fn new(queries_path: &PathBuf) -> oxc_diagnostics::Result<Self> {
let mut deserialized_queries = vec![];

for dir_entry_found_maybe in Walk::new(queries_path) {
@@ -95,7 +72,7 @@ impl LinterPlugin {
}
}

Ok(Self { rules: deserialized_queries, schema })
Ok(Self { rules: deserialized_queries })
}

/// Run specific plugin rule by reference on parsed code.
@@ -109,7 +86,6 @@ impl LinterPlugin {
// the Adapter trait is implemented for a &Adapter, not just Adapter
#[allow(clippy::redundant_allocation)]
fn run_specific_plugin_rule(
&self,
ctx: &mut LintContext,
plugin: &InputQuery,
adapter: &Arc<&Adapter<'_>>,
@@ -125,7 +101,7 @@ impl LinterPlugin {
let query_span = SourceSpan::new(0.into(), plugin.query.len().into());

let query_results =
execute_query(self.schema, Arc::clone(adapter), &plugin.query, plugin.args.clone())
execute_query(schema(), Arc::clone(adapter), &plugin.query, plugin.args.clone())
.map_err(|err| ErrorFromLinterPlugin::Trustfall {
error_message: err.to_string(),
query_source: Arc::clone(&query_source),
@@ -195,7 +171,7 @@ impl LinterPlugin {
Ok(())
}

/// Run specific plugin rule by name or multiple plugin rules on parsed code.
/// Run all plugin rules on parsed code.
///
/// # Errors
/// Any errors that occur while linting the file, such as if the file can't be read,
@@ -205,172 +181,33 @@ impl LinterPlugin {
&self,
ctx: &mut LintContext,
relative_file_path_parts: Vec<Option<String>>,
rules_to_run: RulesToRun,
) -> oxc_diagnostics::Result<()> {
let inner = Adapter::new(Rc::clone(ctx.semantic()), relative_file_path_parts);
let adapter = Arc::from(&inner);
if let RulesToRun::Only(this_rule) = rules_to_run {
for rule in self.rules.iter().filter(|x| x.name == this_rule) {
self.run_specific_plugin_rule(ctx, rule, &adapter)?;
}
} else {
for rule in &self.rules {
self.run_specific_plugin_rule(ctx, rule, &adapter)?;
}
for rule in &self.rules {
Self::run_specific_plugin_rule(ctx, rule, &adapter)?;
}
Ok(())
}
}

/// Run one individual test on unparsed code.
fn run_individual_test(
test: &SingleTest,
rule_name: &str,
plugin: &LinterPlugin,
) -> std::result::Result<Vec<Report>, Vec<Report>> {
let file_path = &test.relative_path.last().expect("there to be at least 1 path part");
let source_text = &test.code;

let allocator = Allocator::default();
let source_type = SourceType::from_path(file_path).unwrap();
let ret = Parser::new(&allocator, source_text, source_type).parse();

// Handle parser errors
if !ret.errors.is_empty() {
return Err(ret.errors);
}

let program = allocator.alloc(ret.program);
let SemanticBuilderReturn { semantic, errors } =
SemanticBuilder::new(source_text, source_type).with_trivias(ret.trivias).build(program);

// Handle semantic errors
if !errors.is_empty() {
return Err(errors);
}

let semantic = Rc::new(semantic);

let mut lint_ctx = LintContext::new(&Rc::clone(&semantic));

let result = plugin.lint_file(
&mut lint_ctx,
test.relative_path.iter().map(|el| Some(el.clone())).collect::<Vec<_>>(),
RulesToRun::Only(rule_name.to_string()),
);

// Handle query errors
if let Some(err) = result.err() {
return Err(vec![err]);
}

// Return plugin made errors
Ok(lint_ctx.into_message().into_iter().map(|m| m.error).collect::<Vec<_>>())
}

/// Enumerates and tests all queries at the path given.
/// # Errors
/// Unable to read any of the yaml rule files or unable to parse any of the yaml rule files,
/// or if any test expected to pass but failed, or if any test expected to fail but passed,
/// or query execution errors such as if the `span_start` and `span_end` are not both
/// understood types by the error reporting system.
pub fn test_queries(queries_to_test: &PathBuf) -> oxc_diagnostics::Result<()> {
let plugin = LinterPlugin::new(schema(), queries_to_test)?;

for rule in &plugin.rules {
for (ix, test) in rule.tests.pass.iter().enumerate() {
let diagnostics_collected = run_individual_test(test, &rule.name, &plugin);
let source = Arc::new(NamedSource::new(
format!("./{}", test.relative_path.join("/")),
test.code.clone(),
));

match diagnostics_collected {
Err(errs) | Ok(errs) if !errs.is_empty() => {
let yaml_text =
fs::read_to_string(&rule.path).map_err(ErrorFromLinterPlugin::ReadFile)?;

let errors_with_code = errs
.into_iter()
.map(|e| {
// Don't change the sourcecode of errors that already have their own sourcecode
if e.source_code().is_some() {
e
} else {
// Add js code to errors that don't have code yet
e.with_source_code(Arc::clone(&source))
}
})
.collect();

return Err(ExpectedTestToPassButFailed {
errors: errors_with_code,
err_span: span_of_test_n(&yaml_text, ix, &test.code, &PassOrFail::Pass),
query: NamedSource::new(rule.path.to_string_lossy(), yaml_text),
}
.into());
}
_ => { /* Ignore the empty diagnostics, as it means the test passed. */ }
};
}

for (i, test) in rule.tests.fail.iter().enumerate() {
let diagnostics_collected = run_individual_test(test, &rule.name, &plugin);
let source = Arc::new(NamedSource::new(
format!("./{}", test.relative_path.join("/")),
test.code.clone(),
));

match diagnostics_collected {
Ok(errs)
if errs.len() == 1 // TODO: Handle more than one error
&& matches!(
errs[0].downcast_ref::<ErrorFromLinterPlugin>(),
Some(ErrorFromLinterPlugin::PluginGenerated(..))
) =>
{ /* Success case. */ }
Ok(errs) if errs.is_empty() => {
let yaml_text =
fs::read_to_string(&rule.path).map_err(ErrorFromLinterPlugin::ReadFile)?;

return Err(ExpectedTestToFailButPassed {
err_span: span_of_test_n(&yaml_text, i, &test.code, &PassOrFail::Fail),
query: NamedSource::new(rule.path.to_string_lossy(), yaml_text),
}
.into());
}
Err(errs) | Ok(errs) => {
let yaml_text =
fs::read_to_string(&rule.path).map_err(ErrorFromLinterPlugin::ReadFile)?;

return Err(UnexpectedErrorsInFailTest {
errors: errs
.into_iter()
.map(|e| {
// Don't change the sourcecode of errors that already have their own sourcecode
if e.source_code().is_some() {
e
} else {
e.with_source_code(Arc::clone(&source))
}
})
.collect(),
err_span: span_of_test_n(&yaml_text, i, &test.code, &PassOrFail::Fail),
query: NamedSource::new(rule.path.to_string_lossy(), yaml_text),
}
.into());
}
}
}

if rule.tests.pass.len() + rule.tests.fail.len() > 0 {
println!(
"{} passed {} tests successfully.\n",
rule.name,
rule.tests.pass.len() + rule.tests.fail.len()
);
/// Run the plugin rule with the given name on parsed code.
///
/// # Errors
/// Any errors that occur while linting the file, such as if the file can't be read,
/// or if the file can't be parsed, or if the query can't be executed, or if the query's
/// output types are wrong.
#[cfg(test)]
pub(crate) fn lint_file_with_rule(
&self,
ctx: &mut LintContext,
relative_file_path_parts: Vec<Option<String>>,
rule_name: &str,
) -> oxc_diagnostics::Result<()> {
let inner = Adapter::new(Rc::clone(ctx.semantic()), relative_file_path_parts);
let adapter = Arc::from(&inner);
for rule in self.rules.iter().filter(|x| x.name == rule_name) {
Self::run_specific_plugin_rule(ctx, rule, &adapter)?;
}
Ok(())
}

Ok(())
}
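
To make the simplified API concrete, here is a hedged sketch of driving `lint_file` end to end. It mirrors the parse → semantic → `LintContext` setup that the removed `run_individual_test` helper used, so only the calls visible in this diff are relied on; the file path and rules directory are hypothetical, and signatures are as of this commit.

```rust
// Sketch only: run every plugin rule against one source file, following the
// same setup the removed run_individual_test helper used.
use std::{path::PathBuf, rc::Rc};

use oxc_allocator::Allocator;
use oxc_linter::LintContext;
use oxc_linter_plugin::LinterPlugin;
use oxc_parser::Parser;
use oxc_semantic::{SemanticBuilder, SemanticBuilderReturn};
use oxc_span::SourceType;

fn run_plugin_on_file(source_text: &str, file_path: &str) -> oxc_diagnostics::Result<()> {
    // Load all .yml rules from a (hypothetical) directory.
    let plugin = LinterPlugin::new(&PathBuf::from("examples/queries"))?;

    // Parse and build semantics, as in the removed test helper; error handling
    // for parser/semantic errors is omitted here for brevity.
    let allocator = Allocator::default();
    let source_type = SourceType::from_path(file_path).unwrap();
    let ret = Parser::new(&allocator, source_text, source_type).parse();

    let program = allocator.alloc(ret.program);
    let SemanticBuilderReturn { semantic, errors: _ } =
        SemanticBuilder::new(source_text, source_type).with_trivias(ret.trivias).build(program);
    let semantic = Rc::new(semantic);

    let mut ctx = LintContext::new(&Rc::clone(&semantic));

    // lint_file now takes only the context and the relative path parts;
    // per-rule filtering moved to the test-only lint_file_with_rule.
    let parts = file_path.split('/').map(|s| Some(s.to_string())).collect();
    plugin.lint_file(&mut ctx, parts)?;

    // Diagnostics produced by the rules stay on the context, drained the same
    // way the removed helper did with ctx.into_message().
    for message in ctx.into_message() {
        eprintln!("{:?}", message.error);
    }
    Ok(())
}
```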
2 changes: 1 addition & 1 deletion crates/oxc_linter_plugin/src/spans.rs
@@ -1,7 +1,7 @@
use std::fmt::Display;

use located_yaml::{YamlElt, YamlLoader};
use miette::SourceSpan;
use oxc_diagnostics::miette::SourceSpan;

/// Whether a rule is under the pass or the fail column of the plugin file.
pub enum PassOrFail {
