diff --git a/.readthedocs.yaml b/.config/.readthedocs.yaml similarity index 100% rename from .readthedocs.yaml rename to .config/.readthedocs.yaml diff --git a/cliff.toml b/.config/cliff.toml similarity index 100% rename from cliff.toml rename to .config/cliff.toml diff --git a/.config/nextest.toml b/.config/nextest.toml new file mode 100644 index 0000000..00d072c --- /dev/null +++ b/.config/nextest.toml @@ -0,0 +1,31 @@ +# required minimum nextest version +nextest-version = "0.9.77" + +[profile.default] +# A profile to run most tests, except tests that run longer than 10 seconds +default-filter = "not test(#*rate_limit_secondary) - test(#git::test::with_*)" + +# This will flag any test that runs longer than 10 seconds. Useful when writing new tests. +slow-timeout = "10s" + +[profile.ci] +# A profile to run only tests that use clang-tidy and/or clang-format +# NOTE: This profile is intended to keep CI runtime low. Locally, use the default or all profiles. + +# This is all tests in the tests/ folder + the unit test for --extra-args. +default-filter = "kind(test) + test(#*use_extra_args)" + +# show which tests were skipped +status-level = "skip" + +# show log output from each test +success-output = "final" +failure-output = "immediate-final" + +[profile.all] +# A profile to run all tests (including tests that run longer than 10 seconds) +default-filter = "all()" + +# Revert slow-timeout to the nextest default value. +# Otherwise, the default profile's value (10s) is inherited. +slow-timeout = "60s" diff --git a/.github/workflows/bump-n-release.yml b/.github/workflows/bump-n-release.yml index 12c5fd0..f764aa5 100644 --- a/.github/workflows/bump-n-release.yml +++ b/.github/workflows/bump-n-release.yml @@ -54,7 +54,7 @@ jobs: uses: orhun/git-cliff-action@v4 id: git-cliff with: - config: cliff.toml + config: .config/cliff.toml args: --unreleased env: OUTPUT: ${{ runner.temp }}/changes.md diff --git a/.github/workflows/bump_version.py b/.github/workflows/bump_version.py index 523cae8..ec616c2 100644 --- a/.github/workflows/bump_version.py +++ b/.github/workflows/bump_version.py @@ -120,7 +120,16 @@ def main(): subprocess.run(["napi", "version"], cwd="node-binding", check=True) print("Updated version in node-binding/**package.json") - subprocess.run(["git", "cliff", "--tag", Updater.new_version], check=True) + subprocess.run( + [ + "git-cliff", + "--config", + ".config/cliff.toml", + "--tag", + Updater.new_version, + ], + check=True, + ) print("Updated CHANGELOG.md") subprocess.run(["git", "add", "--all"], check=True) diff --git a/.github/workflows/python-packaging.yml b/.github/workflows/python-packaging.yml index 7a501d0..297693c 100644 --- a/.github/workflows/python-packaging.yml +++ b/.github/workflows/python-packaging.yml @@ -155,6 +155,9 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 + - uses: actions/setup-python@v5 + with: + python-version: 3.x - name: Build sdist uses: PyO3/maturin-action@v1 with: diff --git a/.github/workflows/run-dev-tests.yml b/.github/workflows/run-dev-tests.yml index 9a0e5ca..5c06a0a 100644 --- a/.github/workflows/run-dev-tests.yml +++ b/.github/workflows/run-dev-tests.yml @@ -88,7 +88,7 @@ jobs: if: runner.os == 'Linux' env: CLANG_VERSION: '7' - run: just test + run: just test ci - name: Install clang v8 if: runner.os == 'Linux' @@ -100,7 +100,7 @@ jobs: if: runner.os == 'Linux' env: CLANG_VERSION: '8' - run: just test + run: just test ci - name: Install clang v9 uses: cpp-linter/cpp_linter_rs/install-clang-action@main @@ -110,7 +110,7 @@ jobs: - name: Collect Coverage 
for clang v9 env: CLANG_VERSION: '9' - run: just test + run: just test ci - name: Install clang v10 uses: cpp-linter/cpp_linter_rs/install-clang-action@main @@ -120,7 +120,7 @@ jobs: - name: Collect Coverage for clang v10 env: CLANG_VERSION: '10' - run: just test + run: just test ci - name: Install clang 11 uses: cpp-linter/cpp_linter_rs/install-clang-action@main @@ -130,7 +130,7 @@ jobs: - name: Collect Coverage for clang v11 env: CLANG_VERSION: '11' - run: just test + run: just test ci - name: Install clang 12 uses: cpp-linter/cpp_linter_rs/install-clang-action@main @@ -140,7 +140,7 @@ jobs: - name: Collect Coverage for clang v12 env: CLANG_VERSION: '12' - run: just test + run: just test ci - name: Install clang 13 uses: cpp-linter/cpp_linter_rs/install-clang-action@main @@ -150,7 +150,7 @@ jobs: - name: Collect Coverage for clang v13 env: CLANG_VERSION: '13' - run: just test + run: just test ci - name: Install clang 14 uses: cpp-linter/cpp_linter_rs/install-clang-action@main @@ -160,7 +160,7 @@ jobs: - name: Collect Coverage for clang v14 env: CLANG_VERSION: '14' - run: just test + run: just test ci - name: Install clang 15 uses: cpp-linter/cpp_linter_rs/install-clang-action@main @@ -170,7 +170,7 @@ jobs: - name: Collect Coverage for clang v15 env: CLANG_VERSION: '15' - run: just test + run: just test ci - name: Install clang 16 uses: cpp-linter/cpp_linter_rs/install-clang-action@main @@ -180,7 +180,7 @@ jobs: - name: Collect Coverage for clang v16 env: CLANG_VERSION: '16' - run: just test + run: just test ci - name: Install clang 17 uses: cpp-linter/cpp_linter_rs/install-clang-action@main @@ -190,7 +190,7 @@ jobs: - name: Collect Coverage for clang v17 env: CLANG_VERSION: '17' - run: just test + run: just test ci - name: Install clang 18 uses: cpp-linter/cpp_linter_rs/install-clang-action@main @@ -200,7 +200,7 @@ jobs: - name: Collect Coverage for clang v18 env: CLANG_VERSION: '18' - run: just test --run-ignored=all + run: just test all - name: Generate Coverage HTML report run: just pretty-cov @@ -212,13 +212,11 @@ jobs: path: target/llvm-cov-pretty - name: Generate Coverage lcov report - if: runner.os == 'Linux' run: | rm coverage.json just lcov - uses: codecov/codecov-action@v4 - if: runner.os == 'Linux' with: token: ${{secrets.CODECOV_TOKEN}} files: lcov.info diff --git a/Cargo.lock b/Cargo.lock index 280d638..2e66778 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -332,6 +332,7 @@ checksum = "773648b94d0e5d620f64f280777445740e61fe701025087ec8b57f45c791888b" name = "cpp-linter" version = "2.0.0-rc4" dependencies = [ + "anyhow", "chrono", "clap", "fast-glob", @@ -359,6 +360,7 @@ dependencies = [ name = "cpp-linter-js" version = "2.0.0-rc4" dependencies = [ + "anyhow", "cpp-linter", "napi", "napi-build", diff --git a/cpp-linter/Cargo.toml b/cpp-linter/Cargo.toml index fd07244..f0d6508 100644 --- a/cpp-linter/Cargo.toml +++ b/cpp-linter/Cargo.toml @@ -14,6 +14,7 @@ license.workspace = true # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] +anyhow = "1.0.89" chrono = "0.4.38" clap = "4.5.17" fast-glob = "0.4.0" diff --git a/cpp-linter/src/clang_tools/clang_format.rs b/cpp-linter/src/clang_tools/clang_format.rs index ea87e0f..a3c82d3 100644 --- a/cpp-linter/src/clang_tools/clang_format.rs +++ b/cpp-linter/src/clang_tools/clang_format.rs @@ -6,6 +6,7 @@ use std::{ sync::{Arc, Mutex, MutexGuard}, }; +use anyhow::{Context, Result}; use log::Level; // non-std crates use serde::Deserialize; @@ -56,13 +57,15 @@ pub struct Replacement { 
/// /// This value is not provided by the XML output, but we calculate it after /// deserialization. - pub line: Option<usize>, + #[serde(default)] + pub line: usize, /// The column number on the line described by the [`Replacement::offset`]. /// /// This value is not provided by the XML output, but we calculate it after /// deserialization. - pub cols: Option<usize>, + #[serde(default)] + pub cols: usize, } impl Clone for Replacement { @@ -109,7 +112,7 @@ pub fn tally_format_advice(files: &[Arc<Mutex<FileObj>>]) -> u64 { pub fn run_clang_format( file: &mut MutexGuard<FileObj>, clang_params: &ClangParams, -) -> Vec<(log::Level, String)> { +) -> Result<Vec<(log::Level, String)>> { let mut cmd = Command::new(clang_params.clang_format_command.as_ref().unwrap()); let mut logs = vec![]; cmd.args(["--style", &clang_params.style]); @@ -117,7 +120,8 @@ pub fn run_clang_format( for range in &ranges { cmd.arg(format!("--lines={}:{}", range.start(), range.end())); } - cmd.arg(file.name.to_string_lossy().as_ref()); + let file_name = file.name.to_string_lossy().to_string(); + cmd.arg(file.name.to_path_buf().as_os_str()); let mut patched = None; if clang_params.format_review { logs.push(( @@ -129,7 +133,7 @@ pub fn run_clang_format( .as_ref() .unwrap() .to_str() - .unwrap(), + .unwrap_or_default(), cmd.get_args() .map(|a| a.to_str().unwrap()) .collect::<Vec<&str>>() .join(" ") ), )); patched = Some( cmd.output() - .expect("Failed to get fixes from clang-format") + .with_context(|| format!("Failed to get fixes from clang-format: {file_name}"))? .stdout, ); } @@ -147,33 +151,30 @@ pub fn run_clang_format( log::Level::Info, format!( "Running \"{} {}\"", - clang_params - .clang_format_command - .as_ref() - .unwrap() - .to_str() - .unwrap(), + cmd.get_program().to_string_lossy(), cmd.get_args() .map(|x| x.to_str().unwrap()) .collect::<Vec<&str>>() .join(" ") ), )); - let output = cmd.output().unwrap(); + let output = cmd .output() .with_context(|| format!("Failed to get replacements from clang-format: {file_name}"))?; if !output.stderr.is_empty() || !output.status.success() { logs.push(( log::Level::Debug, format!( "clang-format raised the follow errors:\n{}", - String::from_utf8(output.stderr).unwrap() + String::from_utf8_lossy(&output.stderr) ), )); } if output.stdout.is_empty() { - return logs; + return Ok(logs); } let xml = String::from_utf8(output.stdout) - .unwrap() + .with_context(|| format!("stdout from clang-format was not UTF-8 encoded: {file_name}"))? 
.lines() .collect::<Vec<&str>>() .join(""); @@ -189,11 +190,12 @@ }); format_advice.patched = patched; if !format_advice.replacements.is_empty() { + // get line and column numbers from format_advice.offset let mut filtered_replacements = Vec::new(); for replacement in &mut format_advice.replacements { let (line_number, columns) = get_line_cols_from_offset(&file.name, replacement.offset); - replacement.line = Some(line_number); - replacement.cols = Some(columns); + replacement.line = line_number; + replacement.cols = columns; for range in &ranges { if range.contains(&line_number.try_into().unwrap_or(0)) { filtered_replacements.push(replacement.clone()); @@ -208,7 +210,7 @@ format_advice.replacements = filtered_replacements; } file.format_advice = Some(format_advice); - logs + Ok(logs) } #[cfg(test)] @@ -234,29 +236,29 @@ mod tests { offset: 113, length: 5, value: Some(String::from("\n ")), - line: None, - cols: None, + line: 0, + cols: 0, }, Replacement { offset: 147, length: 0, value: Some(String::from(" ")), - line: None, - cols: None, + line: 0, + cols: 0, }, Replacement { offset: 161, length: 0, value: None, - line: None, - cols: None, + line: 0, + cols: 0, }, Replacement { offset: 165, length: 19, value: Some(String::from("\n\n")), - line: None, - cols: None, + line: 0, + cols: 0, }, ], patched: None, diff --git a/cpp-linter/src/clang_tools/clang_tidy.rs b/cpp-linter/src/clang_tools/clang_tidy.rs index f276e82..52d5640 100644 --- a/cpp-linter/src/clang_tools/clang_tidy.rs +++ b/cpp-linter/src/clang_tools/clang_tidy.rs @@ -9,6 +9,7 @@ use std::{ sync::{Arc, Mutex, MutexGuard}, }; +use anyhow::{Context, Result}; // non-std crates use regex::Regex; use serde::Deserialize; @@ -132,7 +133,7 @@ impl MakeSuggestions for TidyAdvice { fn parse_tidy_output( tidy_stdout: &[u8], database_json: &Option<Vec<CompilationUnit>>, -) -> Option<TidyAdvice> { +) -> Result<Option<TidyAdvice>> { let note_header = Regex::new(r"^(.+):(\d+):(\d+):\s(\w+):(.*)\[([a-zA-Z\d\-\.]+)\]$").unwrap(); let fixed_note = Regex::new(r"^.+:(\d+):\d+:\snote: FIX-IT applied suggested code changes$").unwrap(); @@ -178,14 +179,15 @@ fn parse_tidy_output( // likely not a member of the project's sources (ie /usr/include/stdio.h) filename = filename .strip_prefix(&cur_dir) - .expect("cannot determine filename by relative path.") + // we already checked above that filename.starts_with(current_directory) + .unwrap() .to_path_buf(); } notification = Some(TidyNotification { filename: filename.to_string_lossy().to_string().replace('\\', "/"), - line: captured[2].parse::<u32>().unwrap(), - cols: captured[3].parse::<u32>().unwrap(), + line: captured[2].parse()?, + cols: captured[3].parse()?, severity: String::from(&captured[4]), rationale: String::from(&captured[5]).trim().to_string(), diagnostic: String::from(&captured[6]), @@ -195,9 +197,7 @@ // begin capturing subsequent lines as suggestions found_fix = false; } else if let Some(capture) = fixed_note.captures(line) { - let fixed_line = capture[1] - .parse() - .expect("Failed to parse fixed line number's string as integer"); + let fixed_line = capture[1].parse()?; if let Some(note) = &mut notification { if !note.fixed_lines.contains(&fixed_line) { note.fixed_lines.push(fixed_line); @@ -219,12 +219,12 @@ result.push(note); } if result.is_empty() { - None + Ok(None) } else { - Some(TidyAdvice { + Ok(Some(TidyAdvice { notes: result, patched: None, - }) + })) } } @@ -249,7 +249,7 @@ pub fn tally_tidy_advice(files: &[Arc<Mutex<FileObj>>]) -> u64 { pub fn run_clang_tidy( file: &mut 
MutexGuard<FileObj>, clang_params: &ClangParams, -) -> Vec<(log::Level, std::string::String)> { +) -> Result<Vec<(log::Level, String)>> { let mut cmd = Command::new(clang_params.clang_tidy_command.as_ref().unwrap()); let mut logs = vec![]; if !clang_params.tidy_checks.is_empty() { cmd.args(["-checks", &clang_params.tidy_checks]); } if let Some(db) = &clang_params.database { cmd.args(["-p", &db.to_string_lossy()]); } - if let Some(extras) = &clang_params.extra_args { - for arg in extras { - cmd.args(["--extra-arg", format!("\"{}\"", arg).as_str()]); - } + for arg in &clang_params.extra_args { + cmd.args(["--extra-arg", format!("\"{}\"", arg).as_str()]); } + let file_name = file.name.to_string_lossy().to_string(); if clang_params.lines_changed_only != LinesChangedOnly::Off { let ranges = file.get_ranges(&clang_params.lines_changed_only); let filter = format!( "[{{\"name\":{:?},\"lines\":{:?}}}]", - &file - .name - .to_string_lossy() - .replace('/', if OS == "windows" { "\\" } else { "/" }), + &file_name.replace('/', if OS == "windows" { "\\" } else { "/" }), ranges .iter() .map(|r| [r.start(), r.end()]) @@ -281,10 +277,12 @@ let mut original_content = None; if clang_params.tidy_review { cmd.arg("--fix-errors"); - original_content = - Some(fs::read_to_string(&file.name).expect( - "Failed to cache file's original content before applying clang-tidy changes.", - )); + original_content = Some(fs::read_to_string(&file.name).with_context(|| { + format!( + "Failed to cache file's original content before applying clang-tidy changes: {}", + file_name.clone() + ) + })?); } if !clang_params.style.is_empty() { cmd.args(["--format-style", clang_params.style.as_str()]); @@ -306,7 +304,7 @@ log::Level::Debug, format!( "Output from clang-tidy:\n{}", - String::from_utf8(output.stdout.to_vec()).unwrap() + String::from_utf8_lossy(&output.stdout) ), )); if !output.stderr.is_empty() { logs.push(( log::Level::Debug, format!( "clang-tidy made the following summary:\n{}", - String::from_utf8(output.stderr).unwrap() + String::from_utf8_lossy(&output.stderr) ), )); } - file.tidy_advice = parse_tidy_output(&output.stdout, &clang_params.database_json); + file.tidy_advice = parse_tidy_output(&output.stdout, &clang_params.database_json)?; if clang_params.tidy_review { - let file_name = &file.name.to_owned(); if let Some(tidy_advice) = &mut file.tidy_advice { // cache file changes in a buffer and restore the original contents for further analysis by clang-format tidy_advice.patched = - Some(fs::read(file_name).expect("Failed to read changes from clang-tidy")); + Some(fs::read(&file_name).with_context(|| { + format!("Failed to read changes from clang-tidy: {file_name}") + })?); } - fs::write( - file_name, - original_content.expect("original content of file was not cached"), - ) - .expect("failed to restore file's original content."); + // original_content is guaranteed to be Some() value at this point + fs::write(&file_name, original_content.unwrap()) + .with_context(|| format!("Failed to restore file's original content: {file_name}"))?; } - logs + Ok(logs) } #[cfg(test)] @@ -427,7 +424,7 @@ mod test { tidy_checks: "".to_string(), // use .clang-tidy config file lines_changed_only: LinesChangedOnly::Off, database: None, - extra_args: Some(extra_args.clone()), // <---- the reason for this test + extra_args: extra_args.clone(), // <---- the reason for this test database_json: None, format_filter: None, tidy_filter: None, @@ -438,6 +435,7 @@ }; let mut file_lock = 
arc_ref.lock().unwrap(); let logs = run_clang_tidy(&mut file_lock, &clang_params) + .unwrap() .into_iter() .filter_map(|(_lvl, msg)| { if msg.contains("Running ") { diff --git a/cpp-linter/src/clang_tools/mod.rs b/cpp-linter/src/clang_tools/mod.rs index 87efba9..68063ae 100644 --- a/cpp-linter/src/clang_tools/mod.rs +++ b/cpp-linter/src/clang_tools/mod.rs @@ -9,6 +9,7 @@ use std::{ sync::{Arc, Mutex}, }; +use anyhow::{anyhow, Context, Result}; use git2::{DiffOptions, Patch}; // non-std crates use lenient_semver; @@ -41,14 +42,14 @@ use clang_tidy::{run_clang_tidy, CompilationUnit}; /// /// The only reason this function would return an error is if the specified tool is not /// installed or present on the system (nor in the `$PATH` environment variable). -pub fn get_clang_tool_exe(name: &str, version: &str) -> Result<PathBuf, &'static str> { +pub fn get_clang_tool_exe(name: &str, version: &str) -> Result<PathBuf> { if version.is_empty() { // The default CLI value is an empty string. // Thus, we should use whatever is installed and added to $PATH. if let Ok(cmd) = which(name) { return Ok(cmd); } else { - return Err("Could not find clang tool by name"); + return Err(anyhow!("Could not find clang tool by name")); } } if let Ok(semver) = lenient_semver::parse_into::<semver::Version>(version) { @@ -66,14 +67,14 @@ pub fn get_clang_tool_exe(name: &str, version: &str) -> Result<PathBuf, &'static str> Ok(cmd) } else { - Err("Could not find clang tool by version") + Err(anyhow!("Could not find clang tool by version")) } } fn analyze_single_file( file: Arc<Mutex<FileObj>>, clang_params: Arc<ClangParams>, -) -> (PathBuf, Vec<(log::Level, String)>) { - let mut file = file.lock().unwrap(); +) -> Result<(PathBuf, Vec<(log::Level, String)>)> { + let mut file = file + .lock() + .map_err(|_| anyhow!("Failed to lock file mutex"))?; let mut logs = vec![]; if clang_params.clang_tidy_command.is_some() { if clang_params .tidy_filter .as_ref() .is_some_and(|f| f.is_source_or_ignored(file.name.as_path())) || clang_params.tidy_filter.is_none() { - let tidy_result = run_clang_tidy(&mut file, &clang_params); + let tidy_result = run_clang_tidy(&mut file, &clang_params)?; logs.extend(tidy_result); } else { logs.push(( @@ -108,7 +111,7 @@ "{} not scanned by clang-tidy due to `--ignore-tidy`", file.name.as_os_str().to_string_lossy() ), - )) + )); } } if clang_params.clang_format_command.is_some() { @@ -118,7 +121,7 @@ .is_some_and(|f| f.is_source_or_ignored(file.name.as_path())) || clang_params.format_filter.is_none() { - let format_result = run_clang_format(&mut file, &clang_params); + let format_result = run_clang_format(&mut file, &clang_params)?; logs.extend(format_result); } else { logs.push(( @@ -130,7 +133,7 @@ )); } } - (file.name.clone(), logs) + Ok((file.name.clone(), logs)) } /// Runs clang-tidy and/or clang-format and returns the parsed output from each. @@ -141,31 +144,27 @@ pub async fn capture_clang_tools_output( files: &mut Vec<Arc<Mutex<FileObj>>>, version: &str, clang_params: &mut ClangParams, -) { +) -> Result<()> { // find the executable paths for clang-tidy and/or clang-format and show version // info as debugging output. 
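// A usage sketch of the fallible lookup above (paths are hypothetical and host-dependent; // an empty version string falls back to a plain $PATH lookup via `which`): // let tidy = get_clang_tool_exe("clang-tidy", "14")?; // e.g. /usr/bin/clang-tidy-14 // let fmt = get_clang_tool_exe("clang-format", "")?; // first clang-format found on $PATH 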
if clang_params.tidy_checks != "-*" { clang_params.clang_tidy_command = { - let cmd = get_clang_tool_exe("clang-tidy", version).unwrap(); + let cmd = get_clang_tool_exe("clang-tidy", version)?; log::debug!( "{} --version\n{}", &cmd.to_string_lossy(), - String::from_utf8_lossy( - &Command::new(&cmd).arg("--version").output().unwrap().stdout - ) + String::from_utf8_lossy(&Command::new(&cmd).arg("--version").output()?.stdout) ); Some(cmd) } }; if !clang_params.style.is_empty() { clang_params.clang_format_command = { - let cmd = get_clang_tool_exe("clang-format", version).unwrap(); + let cmd = get_clang_tool_exe("clang-format", version)?; log::debug!( "{} --version\n{}", &cmd.to_string_lossy(), - String::from_utf8_lossy( - &Command::new(&cmd).arg("--version").output().unwrap().stdout - ) + String::from_utf8_lossy(&Command::new(&cmd).arg("--version").output()?.stdout) ); Some(cmd) } @@ -175,10 +174,9 @@ pub async fn capture_clang_tools_output( if let Some(db_path) = &clang_params.database { if let Ok(db_str) = fs::read(db_path.join("compile_commands.json")) { clang_params.database_json = Some( - serde_json::from_str::<Vec<CompilationUnit>>( - String::from_utf8(db_str).unwrap().as_str(), - ) - .expect("Failed to parse compile_commands.json"), + // A compilation database should be UTF-8 encoded, but file paths are not; use lossy conversion. + serde_json::from_str::<Vec<CompilationUnit>>(&String::from_utf8_lossy(&db_str)) + .with_context(|| "Failed to parse compile_commands.json")?, ) } }; @@ -192,7 +190,7 @@ pub async fn capture_clang_tools_output( } while let Some(output) = executors.join_next().await { - if let Ok(out) = output { + if let Ok(out) = output? { let (file_name, logs) = out; start_log_group(format!("Analyzing {}", file_name.to_string_lossy())); for (level, msg) in logs { @@ -201,6 +199,7 @@ end_log_group(); } } + Ok(()) } /// A struct to describe a single suggestion in a pull_request review. @@ -298,7 +297,7 @@ pub fn make_patch<'buffer>( path: &Path, patched: &'buffer [u8], original_content: &'buffer [u8], -) -> Patch<'buffer> { +) -> Result<Patch<'buffer>> { let mut diff_opts = &mut DiffOptions::new(); diff_opts = diff_opts.indent_heuristic(true); diff_opts = diff_opts.context_lines(0); @@ -309,14 +308,13 @@ Some(path), Some(diff_opts), ) - .unwrap_or_else(|_| { - panic!( + .with_context(|| { + format!( "Failed to create patch for file {}.", - path.to_str() - .expect("Failed to convert file's path to string.") + path.to_string_lossy() ) - }); - patch + })?; + Ok(patch) } pub trait MakeSuggestions { @@ -333,32 +331,33 @@ pub trait MakeSuggestions { file_obj: &FileObj, patch: &mut Patch, summary_only: bool, - ) { + ) -> Result<()> { let tool_name = self.get_tool_name(); let is_tidy_tool = tool_name == "clang-tidy"; let hunks_total = patch.num_hunks(); let mut hunks_in_patch = 0u32; let file_name = file_obj .name - .to_str() - .expect("Failed to convert file path to string") + .to_string_lossy() .replace("\\", "/") .trim_start_matches("./") .to_owned(); let patch_buf = &patch .to_buf() - .expect("Failed to convert patch to byte array") + .with_context(|| "Failed to convert patch to byte array")? .to_vec(); review_comments.full_patch[is_tidy_tool as usize].push_str( String::from_utf8(patch_buf.to_owned()) - .expect("Failed to convert patch buffer to string") + .with_context(|| format!("Failed to convert patch to string: {file_name}"))? 
.as_str(), ); if summary_only { - return; + return Ok(()); } for hunk_id in 0..hunks_total { - let (hunk, line_count) = patch.hunk(hunk_id).expect("Failed to get hunk from patch"); + let (hunk, line_count) = patch.hunk(hunk_id).with_context(|| { + format!("Failed to get hunk {hunk_id} from patch for {file_name}") + })?; hunks_in_patch += 1; let hunk_range = file_obj.is_hunk_in_diff(&hunk); if hunk_range.is_none() { @@ -371,16 +370,16 @@ pub trait MakeSuggestions { for line_index in 0..line_count { let diff_line = patch .line_in_hunk(hunk_id, line_index) - .expect("Failed to get line in a hunk"); + .with_context(|| format!("Failed to get line {line_index} in a hunk {hunk_id} of patch for {file_name}"))?; let line = String::from_utf8(diff_line.content().to_owned()) - .expect("Failed to convert line buffer to string"); + .with_context(|| format!("Failed to convert line {line_index} buffer to string in hunk {hunk_id} of patch for {file_name}"))?; if ['+', ' '].contains(&diff_line.origin()) { suggestion.push_str(line.as_str()); } else { removed.push( diff_line .old_lineno() - .expect("Removed line has no line number?!"), + .expect("Removed line should have a line number"), ); } } @@ -397,7 +396,7 @@ pub trait MakeSuggestions { .as_str(), ) } else { - suggestion = format!("```suggestion\n{suggestion}```",); + suggestion = format!("```suggestion\n{suggestion}```"); } let comment = Suggestion { line_start: start_line, @@ -410,6 +409,7 @@ pub trait MakeSuggestions { } } review_comments.tool_total[is_tidy_tool as usize] += hunks_in_patch; + Ok(()) } } diff --git a/cpp-linter/src/cli/mod.rs b/cpp-linter/src/cli/mod.rs index f2d4e87..dd04101 100644 --- a/cpp-linter/src/cli/mod.rs +++ b/cpp-linter/src/cli/mod.rs @@ -366,29 +366,21 @@ Further filtering can still be applied (see [Source options](#source-options))." /// ``` /// The cpp-linter-action (for Github CI workflows) can only use 1 `extra-arg` input option, so /// the value will be split at spaces. 
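/// For example (mirroring the unit tests below), both of the following invocations /// should yield `["-std=c++17", "-Wall"]`: /// `--extra-arg='-std=c++17 -Wall'` (given once; the value is split at spaces) and /// `--extra-arg=-std=c++17 --extra-arg=-Wall` (given twice; the values are kept as-is). 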
-pub fn convert_extra_arg_val(args: &ArgMatches) -> Option<Vec<String>> { - let raw_val = args - .try_get_many::<String>("extra-arg") - .expect("parser failed in set a default for `--extra-arg`"); - if let Some(mut val) = raw_val { - if val.len() == 1 { - // specified once; split and return result - return Some( - val.next() - .unwrap() - .trim_matches('\'') - .trim_matches('"') - .split(' ') - .map(|i| i.to_string()) - .collect(), - ); - } else { - // specified multiple times; just return - Some(val.map(|i| i.to_string()).collect()) - } +pub fn convert_extra_arg_val(args: &ArgMatches) -> Vec<String> { + let mut val = args.get_many::<String>("extra-arg").unwrap_or_default(); + if val.len() == 1 { + // specified once; split and return result + return val + .next() + .unwrap() + .trim_matches('\'') + .trim_matches('"') + .split(' ') + .map(|i| i.to_string()) + .collect(); } else { - // no value specified; just return - None + // specified multiple times; just return + val.map(|i| i.to_string()).collect() } } @@ -407,14 +399,13 @@ mod test { fn extra_arg_0() { let args = parser_args(vec!["cpp-linter"]); let extras = convert_extra_arg_val(&args); - assert!(extras.is_none()); + assert!(extras.is_empty()); } #[test] fn extra_arg_1() { let args = parser_args(vec!["cpp-linter", "--extra-arg='-std=c++17 -Wall'"]); - let extras = convert_extra_arg_val(&args); - let extra_args = extras.expect("extra-arg not parsed properly"); + let extra_args = convert_extra_arg_val(&args); assert_eq!(extra_args.len(), 2); assert_eq!(extra_args, ["-std=c++17", "-Wall"]) } @@ -426,9 +417,7 @@ "--extra-arg=-std=c++17", "--extra-arg=-Wall", ]); - let extras = convert_extra_arg_val(&args); - assert!(extras.is_some()); - let extra_args = extras.expect("extra-arg not parsed properly"); + let extra_args = convert_extra_arg_val(&args); assert_eq!(extra_args.len(), 2); assert_eq!(extra_args, ["-std=c++17", "-Wall"]) } diff --git a/cpp-linter/src/cli/structs.rs b/cpp-linter/src/cli/structs.rs index 650f0ca..5bc876b 100644 --- a/cpp-linter/src/cli/structs.rs +++ b/cpp-linter/src/cli/structs.rs @@ -51,7 +51,7 @@ pub struct Cli { pub ignore_tidy: Option<Vec<String>>, pub tidy_checks: String, pub database: Option<PathBuf>, - pub extra_arg: Option<Vec<String>>, + pub extra_arg: Vec<String>, pub thread_comments: ThreadComments, pub no_lgtm: bool, pub step_summary: bool, @@ -161,7 +161,7 @@ pub struct ClangParams { pub tidy_checks: String, pub lines_changed_only: LinesChangedOnly, pub database: Option<PathBuf>, - pub extra_args: Option<Vec<String>>, + pub extra_args: Vec<String>, pub database_json: Option<Vec<CompilationUnit>>, pub style: String, pub clang_tidy_command: Option<PathBuf>, diff --git a/cpp-linter/src/common_fs/file_filter.rs b/cpp-linter/src/common_fs/file_filter.rs index b6eff2f..88a5521 100644 --- a/cpp-linter/src/common_fs/file_filter.rs +++ b/cpp-linter/src/common_fs/file_filter.rs @@ -1,10 +1,10 @@ +use anyhow::{anyhow, Context, Result}; +use fast_glob::glob_match; use std::{ fs, path::{Path, PathBuf}, }; -use fast_glob::glob_match; - use super::FileObj; #[derive(Debug, Clone)] @@ -150,21 +150,23 @@ impl FileFilter { /// - uses at least 1 of the given `extensions` /// - is not specified in the internal list of `ignored` paths /// - is specified in the internal list `not_ignored` paths (which supersedes `ignored` paths) - pub fn list_source_files(&self, root_path: &str) -> Vec<FileObj> { + pub fn list_source_files(&self, root_path: &str) -> Result<Vec<FileObj>> { let mut files: Vec<FileObj> = Vec::new(); - let entries = fs::read_dir(root_path).expect("repo root-path should exist"); + let entries = fs::read_dir(root_path) + .with_context(|| format!("Failed to read 
directory contents: {root_path}"))?; for entry in entries.filter_map(|p| p.ok()) { let path = entry.path(); if path.is_dir() { let mut is_hidden = false; - let parent = path.components().last().expect("parent not known"); + let parent = path + .components() + .last() + .ok_or(anyhow!("parent directory not known for {path:?}"))?; if parent.as_os_str().to_str().unwrap().starts_with('.') { is_hidden = true; } if !is_hidden { - files.extend( - self.list_source_files(&path.into_os_string().into_string().unwrap()), - ); + files.extend(self.list_source_files(&path.to_string_lossy())?); } } else { let is_valid_src = self.is_source_or_ignored(&path); @@ -175,7 +177,7 @@ impl FileFilter { } } } - files + Ok(files) } } @@ -255,7 +257,7 @@ mod tests { fn walk_dir_recursively() { let extensions = vec!["cpp".to_string(), "hpp".to_string()]; let file_filter = setup_ignore("target", extensions.clone()); - let files = file_filter.list_source_files("."); + let files = file_filter.list_source_files(".").unwrap(); assert!(!files.is_empty()); for file in files { assert!(extensions.contains( diff --git a/cpp-linter/src/common_fs/mod.rs b/cpp-linter/src/common_fs/mod.rs index 253d028..b35dcb2 100644 --- a/cpp-linter/src/common_fs/mod.rs +++ b/cpp-linter/src/common_fs/mod.rs @@ -6,6 +6,8 @@ use std::io::Read; use std::path::{Component, Path}; use std::{ops::RangeInclusive, path::PathBuf}; +use anyhow::{Context, Result}; + use crate::clang_tools::clang_format::FormatAdvice; use crate::clang_tools::clang_tidy::TidyAdvice; use crate::clang_tools::{make_patch, MakeSuggestions, ReviewComments, Suggestion}; @@ -127,30 +129,26 @@ impl FileObj { &self, review_comments: &mut ReviewComments, summary_only: bool, - ) { + ) -> Result<()> { let original_content = - fs::read(&self.name).expect("Failed to read original contents of file"); - let file_name = self - .name - .to_str() - .expect("Failed to convert file extension to string") - .replace("\\", "/"); + fs::read(&self.name).with_context(|| "Failed to read original contents of file")?; + let file_name = self.name.to_str().unwrap_or_default().replace("\\", "/"); let file_path = Path::new(&file_name); if let Some(advice) = &self.format_advice { if let Some(patched) = &advice.patched { - let mut patch = make_patch(file_path, patched, &original_content); - advice.get_suggestions(review_comments, self, &mut patch, summary_only); + let mut patch = make_patch(file_path, patched, &original_content)?; + advice.get_suggestions(review_comments, self, &mut patch, summary_only)?; } } if let Some(advice) = &self.tidy_advice { if let Some(patched) = &advice.patched { - let mut patch = make_patch(file_path, patched, &original_content); - advice.get_suggestions(review_comments, self, &mut patch, summary_only); + let mut patch = make_patch(file_path, patched, &original_content)?; + advice.get_suggestions(review_comments, self, &mut patch, summary_only)?; } if summary_only { - return; + return Ok(()); } // now check for clang-tidy warnings with no fixes applied @@ -159,7 +157,7 @@ impl FileObj { .extension() .unwrap_or_default() .to_str() - .expect("Failed to convert file extension to string"); + .unwrap_or_default(); for note in &advice.notes { if note.fixed_lines.is_empty() { // notification had no suggestion applied in `patched` @@ -200,6 +198,7 @@ impl FileObj { } } } + Ok(()) } } diff --git a/cpp-linter/src/git.rs b/cpp-linter/src/git.rs index e98486d..030ed65 100644 --- a/cpp-linter/src/git.rs +++ b/cpp-linter/src/git.rs @@ -10,6 +10,7 @@ use std::{ops::RangeInclusive, path::PathBuf}; 
+use anyhow::{Context, Result}; // non-std crates use git2::{Diff, Error, Patch, Repository}; @@ -45,7 +46,7 @@ fn get_sha(repo: &Repository, depth: Option<u32>) -> Result<Object<'_>, Er /// If there are files staged for a commit, then the resulting [`Diff`] will describe /// the staged changes. However, if there are no staged changes, then the last commit's /// [`Diff`] is returned. -pub fn get_diff(repo: &Repository) -> git2::Diff { +pub fn get_diff(repo: &Repository) -> Result<git2::Diff> { let head = get_sha(repo, None).unwrap().peel_to_tree().unwrap(); let mut has_staged_files = false; for entry in repo.statuses(None).unwrap().iter() { @@ -63,12 +64,12 @@ if has_staged_files { // get diff for staged files only repo.diff_tree_to_index(Some(&head), None, None) - .expect("Could not get diff for current changes in local repo index") + .with_context(|| "Could not get diff for current changes in local repo index") } else { // get diff for last commit only let base = get_sha(repo, Some(1)).unwrap().peel_to_tree().unwrap(); repo.diff_tree_to_tree(Some(&base), Some(&head), None) - .expect("could not get diff for last commit") + .with_context(|| "Could not get diff for last commit") } } @@ -407,12 +408,13 @@ mod test { set_current_dir(tmp).unwrap(); env::set_var("CI", "false"); // avoid use of REST API when testing in CI rest_api_client + .unwrap() .get_list_of_changed_files(&file_filter) .await + .unwrap() } #[tokio::test] - #[ignore] async fn with_no_changed_sources() { // commit with no modified C/C++ sources let sha = "0c236809891000b16952576dc34de082d7a40bf3"; @@ -427,7 +429,6 @@ } #[tokio::test] - #[ignore] async fn with_changed_sources() { // commit with modified C/C++ sources let sha = "950ff0b690e1903797c303c5fc8d9f3b52f1d3c5"; @@ -447,7 +448,6 @@ } #[tokio::test] - #[ignore] async fn with_staged_changed_sources() { // commit with no modified C/C++ sources let sha = "0c236809891000b16952576dc34de082d7a40bf3"; diff --git a/cpp-linter/src/main.rs b/cpp-linter/src/main.rs index cd8ebe8..27c079a 100644 --- a/cpp-linter/src/main.rs +++ b/cpp-linter/src/main.rs @@ -3,9 +3,10 @@ use std::env; use ::cpp_linter::run::run_main; +use anyhow::Result; /// This function simply forwards CLI args to [`run_main()`]. #[tokio::main] -pub async fn main() { - run_main(env::args().collect::<Vec<String>>()).await; +pub async fn main() -> Result<()> { + run_main(env::args().collect::<Vec<String>>()).await } diff --git a/cpp-linter/src/rest_api/github/mod.rs b/cpp-linter/src/rest_api/github/mod.rs index 7d516e9..0fa66c3 100644 --- a/cpp-linter/src/rest_api/github/mod.rs +++ b/cpp-linter/src/rest_api/github/mod.rs @@ -9,6 +9,7 @@ use std::io::Write; use std::sync::{Arc, Mutex}; // non-std crates +use anyhow::{Context, Result}; use reqwest::{ header::{HeaderMap, HeaderValue, AUTHORIZATION}, Client, Method, Url, }; @@ -34,7 +35,7 @@ pub struct GithubApiClient { client: Client, /// The CI run's event payload from the webhook that triggered the workflow. - pull_request: Option<i64>, + pull_request: i64, /// The name of the event that was triggered when running cpp_linter. pub event_name: String, @@ -51,6 +52,7 @@ /// The value of the `ACTIONS_STEP_DEBUG` environment variable. pub debug_enabled: bool, + /// The response header names that describe the rate limit status. 
rate_limit_headers: RestApiRateLimitHeaders, } @@ -63,23 +65,23 @@ impl RestApiClient for GithubApiClient { fn set_exit_code( &self, checks_failed: u64, format_checks_failed: Option<u64>, tidy_checks_failed: Option<u64>, ) -> u64 { if let Ok(gh_out) = env::var("GITHUB_OUTPUT") { - let mut gh_out_file = OpenOptions::new() - .append(true) - .open(gh_out) - .expect("GITHUB_OUTPUT file could not be opened"); - for (prompt, value) in [ - ("checks-failed", Some(checks_failed)), - ("format-checks-failed", format_checks_failed), - ("tidy-checks-failed", tidy_checks_failed), - ] { - if let Err(e) = writeln!(gh_out_file, "{prompt}={}", value.unwrap_or(0),) { - log::error!("Could not write to GITHUB_OUTPUT file: {}", e); - break; + if let Ok(mut gh_out_file) = OpenOptions::new().append(true).open(gh_out) { + for (prompt, value) in [ + ("checks-failed", Some(checks_failed)), + ("format-checks-failed", format_checks_failed), + ("tidy-checks-failed", tidy_checks_failed), + ] { + if let Err(e) = writeln!(gh_out_file, "{prompt}={}", value.unwrap_or(0),) { + log::error!("Could not write to GITHUB_OUTPUT file: {}", e); + break; + } + } + if let Err(e) = gh_out_file.flush() { + log::debug!("Failed to flush buffer to GITHUB_OUTPUT file: {e:?}"); } + } else { + log::debug!("GITHUB_OUTPUT file could not be opened"); } - gh_out_file - .flush() - .expect("Failed to flush buffer to GITHUB_OUTPUT file"); } log::info!( "{} clang-format-checks-failed", @@ -93,84 +95,74 @@ checks_failed } - fn make_headers() -> HeaderMap { + fn make_headers() -> Result<HeaderMap<HeaderValue>> { let mut headers = HeaderMap::new(); headers.insert( "Accept", - HeaderValue::from_str("application/vnd.github.raw+json") - .expect("Failed to create a header value for the API return data type"), + HeaderValue::from_str("application/vnd.github.raw+json")?, ); - // headers.insert("User-Agent", USER_AGENT.parse().unwrap()); if let Ok(token) = env::var("GITHUB_TOKEN") { - let mut val = HeaderValue::from_str(token.as_str()) - .expect("Failed to create a secure header value for the API token."); + log::debug!("Using auth token from GITHUB_TOKEN environment variable"); + let mut val = HeaderValue::from_str(format!("token {token}").as_str())?; val.set_sensitive(true); headers.insert(AUTHORIZATION, val); } - headers + Ok(headers) } - async fn get_list_of_changed_files(&self, file_filter: &FileFilter) -> Vec<FileObj> { + async fn get_list_of_changed_files(&self, file_filter: &FileFilter) -> Result<Vec<FileObj>> { if env::var("CI").is_ok_and(|val| val.as_str() == "true") && self.repo.is_some() && self.sha.is_some() { // get diff from Github REST API let is_pr = self.event_name == "pull_request"; - let pr = self.pull_request.unwrap_or(-1).to_string(); + let pr = self.pull_request.to_string(); let sha = self.sha.clone().unwrap(); let url = self .api_url - .join("repos/") - .unwrap() - .join(format!("{}/", self.repo.as_ref().unwrap()).as_str()) - .unwrap() - .join(if is_pr { "pulls/" } else { "commits/" }) - .unwrap() - .join(if is_pr { pr.as_str() } else { sha.as_str() }) - .unwrap(); + .join("repos/")? + .join(format!("{}/", self.repo.as_ref().unwrap()).as_str())? + .join(if is_pr { "pulls/" } else { "commits/" })? 
+ .join(if is_pr { pr.as_str() } else { sha.as_str() })?; let mut diff_header = HeaderMap::new(); - diff_header.insert("Accept", "application/vnd.github.diff".parse().unwrap()); + diff_header.insert("Accept", "application/vnd.github.diff".parse()?); + log::debug!("Getting file changes from {}", url.as_str()); let request = Self::make_api_request( &self.client, url.as_str(), Method::GET, None, Some(diff_header), - ); + )?; let response = Self::send_api_request( self.client.clone(), request, - false, self.rate_limit_headers.to_owned(), 0, ) - .await; - match response { - Some(response) => { - if response.status.is_success() { - return parse_diff_from_buf(response.text.as_bytes(), file_filter); - } else { - let endpoint = if is_pr { - Url::parse(format!("{}/files", url.as_str()).as_str()) - .expect("failed to parse URL endpoint") - } else { - url - }; - self.get_changed_files_paginated(endpoint, file_filter) - .await - } - } - None => { - panic!("Failed to connect with GitHub server to get list of changed files.") - } + .await + .with_context(|| "Failed to get list of changed files from GitHub server.")?; + if response.status().is_success() { + Ok(parse_diff_from_buf(&response.bytes().await?, file_filter)) + } else { + let endpoint = if is_pr { + Url::parse(format!("{}/files", url.as_str()).as_str())? + } else { + url + }; + Self::log_response(response, "Failed to get full diff for event").await; + log::debug!("Trying paginated request to {}", endpoint.as_str()); + self.get_changed_files_paginated(endpoint, file_filter) + .await } } else { // get diff from libgit2 API - let repo = open_repo(".") - .expect("Please ensure the repository is checked out before running cpp-linter."); - let list = parse_diff(&get_diff(&repo), file_filter); - list + let repo = open_repo(".").with_context(|| { + "Please ensure the repository is checked out before running cpp-linter." + })?; + let list = parse_diff(&get_diff(&repo)?, file_filter); + Ok(list) } } @@ -178,30 +170,32 @@ &self, url: Url, file_filter: &FileFilter, - ) -> Vec<FileObj> { + ) -> Result<Vec<FileObj>> { - let mut url = Some(Url::parse_with_params(url.as_str(), &[("page", "1")]).unwrap()); + let mut url = Some(Url::parse_with_params(url.as_str(), &[("page", "1")])?); let mut files = vec![]; while let Some(ref endpoint) = url { let request = - Self::make_api_request(&self.client, endpoint.as_str(), Method::GET, None, None); + Self::make_api_request(&self.client, endpoint.as_str(), Method::GET, None, None)?; let response = Self::send_api_request( self.client.clone(), request, - true, self.rate_limit_headers.clone(), 0, ) .await; - if let Some(response) = response { - url = Self::try_next_page(&response.headers); + if let Ok(response) = response { + url = Self::try_next_page(response.headers()); let files_list = if self.event_name != "pull_request" { - let json_value: PushEventFiles = serde_json::from_str(&response.text) - .expect("Failed to deserialize list of changed files from json response"); + let json_value: PushEventFiles = serde_json::from_str(&response.text().await?) + .with_context(|| { + "Failed to deserialize list of changed files from json response" + })?; json_value.files } else { - serde_json::from_str::<Vec<GithubChangedFile>>(&response.text).expect( - "Failed to deserialize list of file changes from Pull Request event.", - ) + serde_json::from_str::<Vec<GithubChangedFile>>(&response.text().await?) + .with_context(|| { + "Failed to deserialize list of file changes from Pull Request event." + })? 
}; for file in files_list { if let Some(patch) = file.patch { @@ -219,14 +213,14 @@ } } } - files + Ok(files) } async fn post_feedback( &self, files: &[Arc<Mutex<FileObj>>], feedback_inputs: FeedbackInput, -) -> u64 { +) -> Result<u64> { let tidy_checks_failed = tally_tidy_advice(files); let format_checks_failed = tally_format_advice(files); let mut comment = None; @@ -257,20 +251,15 @@ } if let Some(repo) = &self.repo { let is_pr = self.event_name == "pull_request"; - let pr = self.pull_request.unwrap_or(-1).to_string() + "/"; + let pr = self.pull_request.to_string() + "/"; let sha = self.sha.clone().unwrap() + "/"; let comments_url = self .api_url - .join("repos/") - .unwrap() - .join(format!("{}/", repo).as_str()) - .unwrap() - .join(if is_pr { "issues/" } else { "commits/" }) - .unwrap() - .join(if is_pr { pr.as_str() } else { sha.as_str() }) - .unwrap() - .join("comments/") - .unwrap(); + .join("repos/")? + .join(format!("{}/", repo).as_str())? + .join(if is_pr { "issues/" } else { "commits/" })? + .join(if is_pr { pr.as_str() } else { sha.as_str() })? + .join("comments")?; self.update_comment( comments_url, @@ -279,15 +268,15 @@ format_checks_failed + tidy_checks_failed == 0, feedback_inputs.thread_comments == ThreadComments::Update, ) - .await; + .await?; } } if self.event_name == "pull_request" && (feedback_inputs.tidy_review || feedback_inputs.format_review) { - self.post_review(files, &feedback_inputs).await; + self.post_review(files, &feedback_inputs).await?; } - format_checks_failed + tidy_checks_failed + Ok(format_checks_failed + tidy_checks_failed) } } @@ -297,71 +286,121 @@ mod test { use std::{ default::Default, env, io::Read, - path::PathBuf, + path::{Path, PathBuf}, sync::{Arc, Mutex}, }; - use chrono::Utc; - use mockito::{Matcher, Server}; use regex::Regex; - use reqwest::{Method, Url}; use tempfile::{tempdir, NamedTempFile}; use super::GithubApiClient; use crate::{ - clang_tools::capture_clang_tools_output, - cli::{ClangParams, FeedbackInput, LinesChangedOnly}, - common_fs::FileObj, + clang_tools::{ + clang_format::{FormatAdvice, Replacement}, + clang_tidy::{TidyAdvice, TidyNotification}, + }, + cli::FeedbackInput, + common_fs::{FileFilter, FileObj}, + logger, rest_api::{RestApiClient, USER_OUTREACH}, }; // ************************* tests for step-summary and output variables - async fn create_comment(tidy_checks: &str, style: &str) -> (String, String) { + async fn create_comment( + is_lgtm: bool, + fail_gh_out: bool, + fail_summary: bool, + ) -> (String, String) { let tmp_dir = tempdir().unwrap(); - let rest_api_client = GithubApiClient::default(); + let rest_api_client = GithubApiClient::new().unwrap(); + logger::init().unwrap(); if env::var("ACTIONS_STEP_DEBUG").is_ok_and(|var| var == "true") { assert!(rest_api_client.debug_enabled); + log::set_max_level(log::LevelFilter::Debug); + } + let mut files = vec![]; + if !is_lgtm { + for _i in 0..65535 { + let filename = String::from("tests/demo/demo.cpp"); + let mut file = FileObj::new(PathBuf::from(&filename)); + let notes = vec![TidyNotification { + filename, + line: 0, + cols: 0, + severity: String::from("note"), + rationale: String::from("A test dummy rationale"), + diagnostic: String::from("clang-diagnostic-warning"), + suggestion: vec![], + fixed_lines: vec![], + }]; + file.tidy_advice = Some(TidyAdvice { + notes, + patched: None, + }); + let replacements = vec![Replacement { + offset: 0, + length: 0, + value: 
Some(String::new()), + line: 1, + cols: 1, + }]; + file.format_advice = Some(FormatAdvice { + replacements, + patched: None, + }); + files.push(Arc::new(Mutex::new(file))); + } } - let mut files = vec![Arc::new(Mutex::new(FileObj::new(PathBuf::from( - "tests/demo/demo.cpp", - ))))]; - let mut clang_params = ClangParams { - tidy_checks: tidy_checks.to_string(), - lines_changed_only: LinesChangedOnly::Off, - style: style.to_string(), - ..Default::default() - }; - capture_clang_tools_output( - &mut files, - env::var("CLANG-VERSION").unwrap_or("".to_string()).as_str(), - &mut clang_params, - ) - .await; let feedback_inputs = FeedbackInput { - style: style.to_string(), + style: if is_lgtm { + String::new() + } else { + String::from("file") + }, step_summary: true, ..Default::default() }; let mut step_summary_path = NamedTempFile::new_in(tmp_dir.path()).unwrap(); - env::set_var("GITHUB_STEP_SUMMARY", step_summary_path.path()); + env::set_var( + "GITHUB_STEP_SUMMARY", + if fail_summary { + Path::new("not-a-file.txt") + } else { + step_summary_path.path() + }, + ); let mut gh_out_path = NamedTempFile::new_in(tmp_dir.path()).unwrap(); - env::set_var("GITHUB_OUTPUT", gh_out_path.path()); - rest_api_client.post_feedback(&files, feedback_inputs).await; + env::set_var( + "GITHUB_OUTPUT", + if fail_gh_out { + Path::new("not-a-file.txt") + } else { + gh_out_path.path() + }, + ); + rest_api_client + .post_feedback(&files, feedback_inputs) + .await + .unwrap(); let mut step_summary_content = String::new(); step_summary_path .read_to_string(&mut step_summary_content) .unwrap(); - assert!(&step_summary_content.contains(USER_OUTREACH)); + if !fail_summary { + assert!(&step_summary_content.contains(USER_OUTREACH)); + } let mut gh_out_content = String::new(); gh_out_path.read_to_string(&mut gh_out_content).unwrap(); - assert!(gh_out_content.starts_with("checks-failed=")); + if !fail_gh_out { + assert!(gh_out_content.starts_with("checks-failed=")); + } (step_summary_content, gh_out_content) } #[tokio::test] async fn check_comment_concerns() { - let (comment, gh_out) = create_comment("readability-*", "file").await; + let (comment, gh_out) = create_comment(false, false, false).await; assert!(&comment.contains(":warning:\nSome files did not pass the configured checks!\n")); let fmt_pattern = Regex::new(r"format-checks-failed=(\d+)\n").unwrap(); let tidy_pattern = Regex::new(r"tidy-checks-failed=(\d+)\n").unwrap(); @@ -381,60 +420,42 @@ mod test { #[tokio::test] async fn check_comment_lgtm() { env::set_var("ACTIONS_STEP_DEBUG", "true"); - let (comment, gh_out) = create_comment("-*", "").await; - assert!(&comment.contains(":heavy_check_mark:\nNo problems need attention.")); + let (comment, gh_out) = create_comment(true, false, false).await; + assert!(comment.contains(":heavy_check_mark:\nNo problems need attention.")); assert_eq!( - &gh_out, + gh_out, "checks-failed=0\nformat-checks-failed=0\ntidy-checks-failed=0\n" ); } - async fn simulate_rate_limit(secondary: bool) { - let mut server = Server::new_async().await; - let url = Url::parse(server.url().as_str()).unwrap(); - env::set_var("GITHUB_API_URL", server.url()); - let client = GithubApiClient::default(); - let reset_timestamp = (Utc::now().timestamp() + 60).to_string(); - let mock = server - .mock("GET", "/") - .match_body(Matcher::Any) - .expect_at_least(1) - .expect_at_most(5) - .with_status(429) - .with_header( - &client.rate_limit_headers.remaining, - if secondary { "1" } else { "0" }, - ) - .with_header(&client.rate_limit_headers.reset, &reset_timestamp); - 
if secondary { - mock.with_header(&client.rate_limit_headers.retry, "0") - .create(); - } else { - mock.create(); - } - let request = - GithubApiClient::make_api_request(&client.client, url, Method::GET, None, None); - GithubApiClient::send_api_request( - client.client.clone(), - request, - true, - client.rate_limit_headers.clone(), - 0, - ) - .await; + #[tokio::test] + async fn fail_gh_output() { + env::set_var("ACTIONS_STEP_DEBUG", "true"); + let (comment, gh_out) = create_comment(true, true, false).await; + assert!(&comment.contains(":heavy_check_mark:\nNo problems need attention.")); + assert!(gh_out.is_empty()); } #[tokio::test] - #[ignore] - #[should_panic(expected = "REST API secondary rate limit exceeded")] - async fn secondary_rate_limit() { - simulate_rate_limit(true).await; + async fn fail_gh_summary() { + env::set_var("ACTIONS_STEP_DEBUG", "true"); + let (comment, gh_out) = create_comment(true, false, true).await; + assert!(comment.is_empty()); + assert_eq!( + gh_out, + "checks-failed=0\nformat-checks-failed=0\ntidy-checks-failed=0\n" + ); } #[tokio::test] - #[ignore] - #[should_panic(expected = "REST API rate limit exceeded!")] - async fn primary_rate_limit() { - simulate_rate_limit(false).await; + async fn fail_get_local_diff() { + env::set_var("CI", "false"); + let tmp_dir = tempdir().unwrap(); + env::set_current_dir(tmp_dir.path()).unwrap(); + let rest_client = GithubApiClient::new().unwrap(); + let files = rest_client + .get_list_of_changed_files(&FileFilter::new(&[], vec![])) + .await; + assert!(files.is_err()) } } diff --git a/cpp-linter/src/rest_api/github/serde_structs.rs b/cpp-linter/src/rest_api/github/serde_structs.rs index df03443..c23eb40 100644 --- a/cpp-linter/src/rest_api/github/serde_structs.rs +++ b/cpp-linter/src/rest_api/github/serde_structs.rs @@ -16,6 +16,7 @@ pub struct FullReview { pub struct ReviewDiffComment { pub body: String, pub line: i64, + #[serde(skip_serializing_if = "Option::is_none")] pub start_line: Option, pub path: String, } @@ -35,6 +36,9 @@ impl From for ReviewDiffComment { } } +/// A constant string used as a payload to dismiss PR reviews. +pub const REVIEW_DISMISSAL: &str = r#"{"event":"DISMISS","message":"outdated suggestion"}"#; + /// A structure for deserializing a single changed file in a CI event. #[derive(Debug, Deserialize, PartialEq, Clone)] pub struct GithubChangedFile { diff --git a/cpp-linter/src/rest_api/github/specific_api.rs b/cpp-linter/src/rest_api/github/specific_api.rs index 4d1abb0..0ecb25a 100644 --- a/cpp-linter/src/rest_api/github/specific_api.rs +++ b/cpp-linter/src/rest_api/github/specific_api.rs @@ -8,95 +8,85 @@ use std::{ sync::{Arc, Mutex}, }; +use anyhow::{anyhow, Context, Result}; use reqwest::{Client, Method, Url}; use crate::{ clang_tools::{clang_format::summarize_style, ReviewComments}, cli::FeedbackInput, common_fs::FileObj, - rest_api::{RestApiRateLimitHeaders, COMMENT_MARKER}, + rest_api::{RestApiRateLimitHeaders, COMMENT_MARKER, USER_AGENT}, }; use super::{ - serde_structs::{FullReview, PullRequestInfo, ReviewComment, ReviewDiffComment, ThreadComment}, + serde_structs::{ + FullReview, PullRequestInfo, ReviewComment, ReviewDiffComment, ThreadComment, + REVIEW_DISMISSAL, + }, GithubApiClient, RestApiClient, }; -impl Default for GithubApiClient { - fn default() -> Self { - Self::new() - } -} - impl GithubApiClient { /// Instantiate a [`GithubApiClient`] object. 
- pub fn new() -> Self { + pub fn new() -> Result<Self> { let event_name = env::var("GITHUB_EVENT_NAME").unwrap_or(String::from("unknown")); let pull_request = { match event_name.as_str() { "pull_request" => { - let event_payload_path = env::var("GITHUB_EVENT_PATH") - .expect("GITHUB_EVENT_NAME is set to 'pull_request', but GITHUB_EVENT_PATH is not set"); + // GITHUB_*** env vars cannot be overwritten in CI runners on GitHub. + let event_payload_path = env::var("GITHUB_EVENT_PATH")?; + // event payload JSON file can be overwritten/removed in CI runners let file_buf = &mut String::new(); OpenOptions::new() .read(true) - .open(event_payload_path) - .unwrap() + .open(event_payload_path.clone())? .read_to_string(file_buf) - .unwrap(); - let json = serde_json::from_str::<HashMap<String, serde_json::Value>>( - file_buf, - ) - .unwrap(); - json["number"].as_i64() + .with_context(|| { + format!("Failed to read event payload at {event_payload_path}") + })?; + let payload = + serde_json::from_str::<HashMap<String, serde_json::Value>>( + file_buf, + ) + .with_context(|| "Failed to deserialize event payload")?; + payload["number"].as_i64().unwrap_or(-1) } - _ => None, + _ => -1, } }; - let api_url = Url::parse( - env::var("GITHUB_API_URL") - .unwrap_or("https://api.github.com".to_string()) - .as_str(), - ) - .expect("Failed to parse URL from GITHUB_API_URL"); + // GITHUB_*** env vars cannot be overwritten in CI runners on GitHub. + let gh_api_url = env::var("GITHUB_API_URL").unwrap_or("https://api.github.com".to_string()); + let api_url = Url::parse(gh_api_url.as_str())?; - GithubApiClient { + Ok(GithubApiClient { client: Client::builder() - .default_headers(Self::make_headers()) - .build() - .expect("Failed to create a session client for REST API calls"), + .default_headers(Self::make_headers()?) + .user_agent(USER_AGENT) + .build()?, pull_request, event_name, api_url, - repo: match env::var("GITHUB_REPOSITORY") { - Ok(val) => Some(val), - Err(_) => None, - }, - sha: match env::var("GITHUB_SHA") { - Ok(val) => Some(val), - Err(_) => None, - }, - debug_enabled: match env::var("ACTIONS_STEP_DEBUG") { - Ok(val) => val == "true", - Err(_) => false, - }, + repo: env::var("GITHUB_REPOSITORY").ok(), + sha: env::var("GITHUB_SHA").ok(), + debug_enabled: env::var("ACTIONS_STEP_DEBUG").is_ok_and(|val| &val == "true"), rate_limit_headers: RestApiRateLimitHeaders { reset: "x-ratelimit-reset".to_string(), remaining: "x-ratelimit-remaining".to_string(), retry: "retry-after".to_string(), }, - } + }) } /// Append step summary to CI workflow's summary page. 
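/// A usage sketch (path hypothetical); the file named by `GITHUB_STEP_SUMMARY` is /// appended to, and failures are logged rather than panicking: /// ```no_run /// std::env::set_var("GITHUB_STEP_SUMMARY", "/tmp/summary.md"); /// client.post_step_summary(&String::from("## cpp-linter results")); /// ``` 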
pub fn post_step_summary(&self, comment: &String) { if let Ok(gh_out) = env::var("GITHUB_STEP_SUMMARY") { - let mut gh_out_file = OpenOptions::new() - .append(true) - .open(gh_out) - .expect("GITHUB_STEP_SUMMARY file could not be opened"); - if let Err(e) = writeln!(gh_out_file, "\n{}\n", comment) { - log::error!("Could not write to GITHUB_STEP_SUMMARY file: {}", e); + // step summary MD file can be overwritten/removed in CI runners + if let Ok(mut gh_out_file) = OpenOptions::new().append(true).open(gh_out) { + if let Err(e) = writeln!(gh_out_file, "\n{}\n", comment) { + log::error!("Could not write to GITHUB_STEP_SUMMARY file: {}", e); + } + } else { + log::error!("GITHUB_STEP_SUMMARY file could not be opened"); } } } @@ -112,10 +102,8 @@ impl GithubApiClient { // assemble a list of line numbers let mut lines: Vec<usize> = Vec::new(); for replacement in &format_advice.replacements { - if let Some(line_int) = replacement.line { - if !lines.contains(&line_int) { - lines.push(line_int); - } + if !lines.contains(&replacement.line) { + lines.push(replacement.line); } } // post annotation if any applicable lines were formatted @@ -157,13 +145,12 @@ impl GithubApiClient { no_lgtm: bool, is_lgtm: bool, update_only: bool, - ) { + ) -> Result<()> { let comment_url = self .remove_bot_comments(&url, !update_only || (is_lgtm && no_lgtm)) - .await; - #[allow(clippy::nonminimal_bool)] // an inaccurate assessment - if (is_lgtm && !no_lgtm) || !is_lgtm { - let payload = HashMap::from([("body", comment.to_owned())]); + .await?; + if !is_lgtm || !no_lgtm { + let payload = HashMap::from([("body", comment)]); // log::debug!("payload body:\n{:?}", payload); let req_meth = if comment_url.is_some() { Method::PATCH } else { Method::POST }; let request = Self::make_api_request( &self.client, - if let Some(url_) = comment_url { - url_ - } else { - url - }, + comment_url.unwrap_or(url), req_meth, - Some( - serde_json::to_string(&payload) - .expect("Failed to serialize thread comment to json string"), - ), + Some(serde_json::json!(&payload).to_string()), None, - ); + )?; - Self::send_api_request( + match Self::send_api_request( self.client.clone(), request, - false, self.rate_limit_headers.to_owned(), 0, ) - .await; + .await + { + Ok(response) => { + Self::log_response(response, "Failed to post thread comment").await; + } + Err(e) => { + log::error!("Failed to post thread comment: {e:?}"); + } + } } + Ok(()) } /// Remove thread comments previously posted by cpp-linter. 
- async fn remove_bot_comments(&self, url: &Url, delete: bool) -> Option { + async fn remove_bot_comments(&self, url: &Url, delete: bool) -> Result> { let mut comment_url = None; - let mut comments_url = Some( - Url::parse_with_params(url.as_str(), &[("page", "1")]) - .expect("Failed to parse invalid URL string"), - ); + let mut comments_url = Some(Url::parse_with_params(url.as_str(), &[("page", "1")])?); let repo = format!( - "repos/{}/comments/", - self.repo.as_ref().expect("Repo name unknown.") + "repos/{}{}/comments", + // if we got here, then we know it is on a CI runner as self.repo should be known + self.repo.as_ref().expect("Repo name unknown."), + if self.event_name == "pull_request" { + "/issues" + } else { + "" + }, ); let base_comment_url = self.api_url.join(&repo).unwrap(); while let Some(ref endpoint) = comments_url { let request = - Self::make_api_request(&self.client, endpoint.as_str(), Method::GET, None, None); - let response = Self::send_api_request( + Self::make_api_request(&self.client, endpoint.as_str(), Method::GET, None, None)?; + let result = Self::send_api_request( self.client.clone(), request, - false, self.rate_limit_headers.to_owned(), 0, ) .await; - if response.is_none() || response.as_ref().is_some_and(|r| !r.status.is_success()) { - log::error!("Failed to get list of existing comments from {}", endpoint); - return comment_url; - } - comments_url = Self::try_next_page(&response.as_ref().unwrap().headers); - let payload: Vec = serde_json::from_str(&response.unwrap().text) - .expect("Failed to serialize response's text"); - for comment in payload { - if comment.body.starts_with(COMMENT_MARKER) { - log::debug!( - "comment id {} from user {} ({})", - comment.id, - comment.user.login, - comment.user.id, - ); - #[allow(clippy::nonminimal_bool)] // an inaccurate assessment - if delete || (!delete && comment_url.is_some()) { - // if not updating: remove all outdated comments - // if updating: remove all outdated comments except the last one - - // use last saved comment_url (if not None) or current comment url - let del_url = if let Some(last_url) = &comment_url { - last_url - } else { - let comment_id = comment.id.to_string(); - &base_comment_url - .join(&comment_id) - .expect("Failed to parse URL from JSON comment.url") - }; - let req = Self::make_api_request( - &self.client, - del_url.clone(), - Method::DELETE, - None, - None, - ); - Self::send_api_request( - self.client.clone(), - req, - false, - self.rate_limit_headers.to_owned(), - 0, + match result { + Err(e) => { + log::error!("Failed to get list of existing thread comments: {e:?}"); + return Ok(comment_url); + } + Ok(response) => { + if !response.status().is_success() { + Self::log_response( + response, + "Failed to get list of existing thread comments", ) .await; + return Ok(comment_url); } - if !delete { - let comment_id = comment.id.to_string(); - comment_url = Some( - base_comment_url - .join(&comment_id) - .expect("Failed to parse URL from JSON comment.url"), - ) + comments_url = Self::try_next_page(response.headers()); + let payload = + serde_json::from_str::>(&response.text().await?); + match payload { + Err(e) => { + log::error!( + "Failed to deserialize list of existing thread comments: {e:?}" + ); + continue; + } + Ok(payload) => { + for comment in payload { + if comment.body.starts_with(COMMENT_MARKER) { + log::debug!( + "Found cpp-linter comment id {} from user {} ({})", + comment.id, + comment.user.login, + comment.user.id, + ); + let this_comment_url = Url::parse( + 
format!("{base_comment_url}/{}", comment.id).as_str(), + )?; + if delete || comment_url.is_some() { + // if not updating: remove all outdated comments + // if updating: remove all outdated comments except the last one + + // use last saved comment_url (if not None) or current comment url + let del_url = if let Some(last_url) = &comment_url { + last_url + } else { + &this_comment_url + }; + let req = Self::make_api_request( + &self.client, + del_url.as_str(), + Method::DELETE, + None, + None, + )?; + match Self::send_api_request( + self.client.clone(), + req, + self.rate_limit_headers.to_owned(), + 0, + ) + .await + { + Ok(result) => { + if !result.status().is_success() { + Self::log_response( + result, + "Failed to delete old thread comment", + ) + .await; + } + } + Err(e) => { + log::error!( + "Failed to delete old thread comment: {e:?}" + ) + } + } + } + if !delete { + comment_url = Some(this_comment_url) + } + } + } + } } } } } - comment_url + Ok(comment_url) } /// Post a PR review with code suggestions. /// /// Note: `--no-lgtm` is applied when nothing is suggested. - pub async fn post_review(&self, files: &[Arc>], feedback_input: &FeedbackInput) { + pub async fn post_review( + &self, + files: &[Arc>], + feedback_input: &FeedbackInput, + ) -> Result<()> { let url = self .api_url - .join("repos/") - .unwrap() - .join(format!("{}/", self.repo.as_ref().expect("Repo name unknown")).as_str()) - .unwrap() - .join("pulls/") - .unwrap() + .join("repos/")? .join( - self.pull_request - .expect("pull request number unknown") - .to_string() - .as_str(), - ) - .unwrap(); - let request = Self::make_api_request(&self.client, url.as_str(), Method::GET, None, None); + format!( + "{}/", + // if we got here, then we know self.repo should be known + self.repo.as_ref().ok_or(anyhow!("Repo name unknown"))? + ) + .as_str(), + )? + .join("pulls/")? + // if we got here, then we know that it is a self.pull_request is a valid value + .join(self.pull_request.to_string().as_str())?; + let request = Self::make_api_request(&self.client, url.as_str(), Method::GET, None, None)?; let response = Self::send_api_request( self.client.clone(), request, - true, self.rate_limit_headers.clone(), 0, - ) - .await; - let pr_info: PullRequestInfo = - serde_json::from_str(&response.expect("Failed to get PR info").text) - .expect("Failed to deserialize PR info"); + ); - let url = Url::parse(format!("{}/", url.as_str()).as_str()) - .unwrap() - .join("reviews") - .expect("Failed to parse URL endpoint for PR reviews"); + let url = Url::parse(format!("{}/", url).as_str())?.join("reviews")?; let dismissal = self.dismiss_outdated_reviews(&url); - - if pr_info.draft || pr_info.state != "open" { - dismissal.await; - return; + match response.await { + Ok(response) => { + match serde_json::from_str::(&response.text().await?) 
{ + Err(e) => { + log::error!("Failed to deserialize PR info: {e:?}"); + return dismissal.await; + } + Ok(pr_info) => { + if pr_info.draft || pr_info.state != "open" { + return dismissal.await; + } + } + } + } + Err(e) => { + log::error!("Failed to get PR info: {e:?}"); + return dismissal.await; + } } - let summary_only = - env::var("CPP_LINTER_PR_REVIEW_SUMMARY_ONLY").unwrap_or("false".to_string()) == "true"; + let summary_only = ["true", "on", "1"].contains( + &env::var("CPP_LINTER_PR_REVIEW_SUMMARY_ONLY") + .unwrap_or("false".to_string()) + .as_str(), + ); let mut review_comments = ReviewComments::default(); for file in files { let file = file.lock().unwrap(); - file.make_suggestions_from_patch(&mut review_comments, summary_only); + file.make_suggestions_from_patch(&mut review_comments, summary_only)?; } let has_no_changes = review_comments.full_patch[0].is_empty() && review_comments.full_patch[1].is_empty(); if has_no_changes && feedback_input.no_lgtm { log::debug!("Not posting an approved review because `no-lgtm` is true"); - dismissal.await; - return; + return dismissal.await; } let mut payload = FullReview { event: if feedback_input.passive_reviews { @@ -356,93 +388,118 @@ impl GithubApiClient { comments }; } - dismissal.await; // free up the `url` variable + dismissal.await?; // free up the `url` variable let request = Self::make_api_request( &self.client, - url, + url.clone(), Method::POST, Some( serde_json::to_string(&payload) - .expect("Failed to serialize PR review to json string"), + .with_context(|| "Failed to serialize PR review to json string")?, ), None, - ); - let response = Self::send_api_request( + )?; + match Self::send_api_request( self.client.clone(), request, - false, self.rate_limit_headers.clone(), 0, ) - .await; - if response.is_none() || response.is_some_and(|r| !r.status.is_success()) { - log::error!("Failed to post a new PR review"); + .await + { + Ok(response) => { + if !response.status().is_success() { + Self::log_response(response, "Failed to post a new PR review").await; + } + } + Err(e) => { + log::error!("Failed to post a new PR review: {e:?}"); + } } + Ok(()) } /// Dismiss any outdated reviews generated by cpp-linter.
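An easy-to-miss behavioral change in this hunk: `CPP_LINTER_PR_REVIEW_SUMMARY_ONLY` now accepts "true", "on", or "1" rather than only "true". A standalone sketch of the check (the helper name is illustrative):

use std::env;

// The truthy spellings accepted by the summary-only switch above; unset or
// any other value is treated as false.
fn env_flag(name: &str) -> bool {
    ["true", "on", "1"].contains(&env::var(name).unwrap_or("false".to_string()).as_str())
}

fn main() {
    env::set_var("DEMO_FLAG", "1");
    assert!(env_flag("DEMO_FLAG"));
    env::set_var("DEMO_FLAG", "off");
    assert!(!env_flag("DEMO_FLAG"));
    env::remove_var("DEMO_FLAG");
    assert!(!env_flag("DEMO_FLAG"));
}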
- async fn dismiss_outdated_reviews(&self, url: &Url) { - let mut url_ = Some( - Url::parse_with_params(url.as_str(), [("page", "1")]) - .expect("Failed to parse endpoint for getting existing PR reviews"), - ); + async fn dismiss_outdated_reviews(&self, url: &Url) -> Result<()> { + let mut url_ = Some(Url::parse_with_params(url.as_str(), [("page", "1")])?); while let Some(ref endpoint) = url_ { let request = - Self::make_api_request(&self.client, endpoint.as_str(), Method::GET, None, None); - let response = Self::send_api_request( + Self::make_api_request(&self.client, endpoint.as_str(), Method::GET, None, None)?; + let result = Self::send_api_request( self.client.clone(), request, - false, self.rate_limit_headers.clone(), 0, ) .await; - if response.is_none() || response.as_ref().is_some_and(|r| !r.status.is_success()) { - log::error!("Failed to get a list of existing PR reviews"); - return; - } - let response = response.unwrap(); - url_ = Self::try_next_page(&response.headers); - let payload: Vec = serde_json::from_str(&response.text) - .expect("Unable to deserialize malformed JSON about review comments"); - for review in payload { - if let Some(body) = &review.body { - if body.starts_with(COMMENT_MARKER) - && !(["PENDING", "DISMISSED"].contains(&review.state.as_str())) - { - // dismiss outdated review - let req = Self::make_api_request( - &self.client, - url.join("reviews/") - .unwrap() - .join(review.id.to_string().as_str()) - .expect("Failed to parse URL for dismissing outdated review."), - Method::PUT, - Some( - serde_json::json!( + match result { + Err(e) => { + log::error!("Failed to get a list of existing PR reviews: {e:?}"); + return Ok(()); + } + Ok(response) => { + if !response.status().is_success() { + Self::log_response(response, "Failed to get a list of existing PR reviews") + .await; + return Ok(()); + } + url_ = Self::try_next_page(response.headers()); + match serde_json::from_str::>(&response.text().await?) { + Err(e) => { + log::error!("Unable to deserialize JSON about review comments: {e:?}"); + return Ok(()); + } + Ok(payload) => { + for review in payload { + if let Some(body) = &review.body { + if body.starts_with(COMMENT_MARKER) + && !(["PENDING", "DISMISSED"] + .contains(&review.state.as_str())) { - "message": "outdated suggestion", - "event": "DISMISS" + // dismiss outdated review + if let Ok(dismiss_url) = url.join( + format!("reviews/{}/dismissals", review.id).as_str(), + ) { + if let Ok(req) = Self::make_api_request( + &self.client, + dismiss_url, + Method::PUT, + Some(REVIEW_DISMISSAL.to_string()), + None, + ) { + match Self::send_api_request( + self.client.clone(), + req, + self.rate_limit_headers.clone(), + 0, + ) + .await + { + Ok(result) => { + if !result.status().is_success() { + Self::log_response( + result, + "Failed to dismiss outdated review", + ) + .await; + } + } + Err(e) => { + log::error!( + "Failed to dismiss outdated review: {e:?}" + ); + } + } + } + } } - ) - .to_string(), - ), - None, - ); - let result = Self::send_api_request( - self.client.clone(), - req, - false, - self.rate_limit_headers.clone(), - 0, - ) - .await; - if result.is_none() || result.is_some_and(|r| !r.status.is_success()) { - log::error!("Failed to dismiss outdated review"); + } + } } } } } } } + Ok(()) } } diff --git a/cpp-linter/src/rest_api/mod.rs b/cpp-linter/src/rest_api/mod.rs index b0059cd..4c9da48 100644 --- a/cpp-linter/src/rest_api/mod.rs +++ b/cpp-linter/src/rest_api/mod.rs @@ -3,16 +3,17 @@ //! //! Currently, only Github is supported.
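Before the mod.rs changes: the dismissal hunk above swaps an inline JSON body for a `REVIEW_DISMISSAL` constant defined outside this diff. Judging from the removed lines (and the body the reviews test expects later in this patch), it presumably serializes to the JSON checked below:

use serde_json::json;

fn main() {
    // The body the removed code built inline; presumably what REVIEW_DISMISSAL
    // now holds. serde_json orders object keys alphabetically by default.
    let body = json!({"message": "outdated suggestion", "event": "DISMISS"}).to_string();
    assert_eq!(body, r#"{"event":"DISMISS","message":"outdated suggestion"}"#);
}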
+use std::fmt::Debug; use std::future::Future; use std::path::PathBuf; use std::sync::{Arc, Mutex}; use std::time::Duration; // non-std crates +use anyhow::{anyhow, Error, Result}; use chrono::DateTime; -use futures::future::{BoxFuture, FutureExt}; use reqwest::header::{HeaderMap, HeaderValue}; -use reqwest::{Client, IntoUrl, Method, Request, Response, StatusCode, Url}; +use reqwest::{Client, IntoUrl, Method, Request, Response, Url}; // project specific modules pub mod github; @@ -20,7 +21,11 @@ use crate::cli::FeedbackInput; use crate::common_fs::{FileFilter, FileObj}; pub static COMMENT_MARKER: &str = "\n"; -pub static USER_OUTREACH: &str = "\n\nHave any feedback or feature suggestions? [Share it here.](https://github.com/cpp-linter/cpp-linter-action/issues)"; +pub static USER_OUTREACH: &str = concat!( + "\n\nHave any feedback or feature suggestions? [Share it here.]", + "(https://github.com/cpp-linter/cpp-linter-action/issues)" +); +pub static USER_AGENT: &str = concat!("cpp-linter/", env!("CARGO_PKG_VERSION")); /// A structure to contain the different forms of headers that /// describe a REST API's rate limit status. @@ -34,28 +39,6 @@ pub struct RestApiRateLimitHeaders { pub retry: String, } -pub struct CachedResponse { - pub text: String, - pub headers: HeaderMap, - pub status: StatusCode, -} - -impl CachedResponse { - async fn from(response: Response) -> Self { - let headers = response.headers().to_owned(); - let status = response.status(); - let text = response - .text() - .await - .expect("Failed to decode response body to text."); - Self { - text, - headers, - status, - } - } -} - /// A custom trait that templates necessary functionality with a Git server's REST API. pub trait RestApiClient { /// A way to set output variables specific to cpp_linter executions in CI. @@ -70,7 +53,7 @@ pub trait RestApiClient { /// /// If an authentication token is provided (via environment variable), /// this method shall include the relative information. - fn make_headers() -> HeaderMap; + fn make_headers() -> Result>; /// Construct a HTTP request to be sent. /// @@ -86,8 +69,8 @@ pub trait RestApiClient { /// let response = Self::send_api_request( /// self.client.clone(), /// request, - /// false, // false means don't panic - /// 0, // start recursion count at 0 + /// self.rest_api_headers.clone(), + /// 0, // start recursion count at 0 (max iterations is 4) /// ); /// match response.await { /// Some(res) => {/* handle response */} @@ -100,7 +83,7 @@ pub trait RestApiClient { method: Method, data: Option, headers: Option, - ) -> Request { + ) -> Result { let mut req = client.request(method, url); if let Some(h) = headers { req = req.headers(h); @@ -108,7 +91,9 @@ pub trait RestApiClient { if let Some(d) = data { req = req.body(d); } - req.build().expect("Failed to create a HTTP request") + // RequestBuilder only fails to `build()` if there is a malformed `url`. We + // should be safe here because of this function's `url` parameter type. + req.build().map_err(Error::from) } /// A convenience function to send HTTP requests and respect a REST API rate limits. @@ -117,110 +102,96 @@ pub trait RestApiClient { /// Recursion is needed when a secondary rate limit is hit. The server tells the client that /// it should back off and retry after a specified time interval. /// - /// Setting the `strict` parameter to true will panic when the HTTP request fails to send or - /// the HTTP response's status code does not describe success. 
This should only be used for - /// requests that are vital to the app operation. - /// With `strict` as true, the returned type is guaranteed to be a [`Some`] value. - /// If `strict` is false, then a failure to send the request is returned as a [`None`] value, - /// and a [`Some`] value could also indicate if the server failed to process the request. + /// Note, pass `0` to the `retries` parameter, which is used to count recursive iterations. + /// This function will recur a maximum of 4 times, and this only happens when the response's + /// headers include a retry interval. fn send_api_request( client: Client, request: Request, - strict: bool, rate_limit_headers: RestApiRateLimitHeaders, - retries: u64, - ) -> BoxFuture<'static, Option> { + retries: u8, + ) -> impl Future> + Send { async move { - match client - .execute( - request - .try_clone() - .expect("Could not clone HTTP request object"), - ) - .await - { - Ok(response) => { + static MAX_RETRIES: u8 = 5; + for i in retries..MAX_RETRIES { + let result = client + .execute(request.try_clone().ok_or(anyhow!( + "Failed to clone request object for recursive behavior" + ))?) + .await; + if let Ok(response) = &result { if [403u16, 429u16].contains(&response.status().as_u16()) { - // rate limit exceeded + // rate limit may have been exceeded // check if primary rate limit was violated; panic if so. - let remaining = response - .headers() - .get(&rate_limit_headers.remaining) - .expect("Response headers do not include remaining API usage count") - .to_str() - .expect("Failed to extract remaining attempts about rate limit") - .parse::() - .expect("Failed to parse i64 from remaining attempts about rate limit"); - if remaining <= 0 { - let reset = DateTime::from_timestamp( - response - .headers() - .get(&rate_limit_headers.reset) - .expect("response headers does not include a reset timestamp") - .to_str() - .expect("Failed to extract reset info about rate limit") - .parse::() - .expect("Failed to parse i64 from reset time about rate limit"), - 0, - ) - .expect("rate limit reset UTC timestamp is an invalid"); - panic!("REST API rate limit exceeded! Resets at {}", reset); + let mut requests_remaining = None; + if let Some(remaining) = + response.headers().get(&rate_limit_headers.remaining) + { + if let Ok(count) = remaining.to_str() { + if let Ok(value) = count.parse::() { + requests_remaining = Some(value); + } else { + log::debug!( + "Failed to parse i64 from remaining attempts about rate limit: {count}" + ); + } + } + } else { + // NOTE: it is sometimes valid for a response to + // not include remaining rate limit attempts + log::debug!( + "Response headers do not include remaining API usage count" + ); + } + if requests_remaining.is_some_and(|v| v <= 0) { + if let Some(reset_value) = + response.headers().get(&rate_limit_headers.reset) + { + if let Ok(epoch) = reset_value.to_str() { + if let Ok(value) = epoch.parse::() { + if let Some(reset) = DateTime::from_timestamp(value, 0) { + return Err(anyhow!( + "REST API rate limit exceeded! Resets at {}", + reset + )); + } + } else { + log::debug!( + "Failed to parse i64 from reset time about rate limit: {epoch}" + ); + } + } + } else { + log::debug!("Response headers do not include a reset timestamp"); + } + return Err(anyhow!("REST API rate limit exceeded!")); } // check if secondary rate limit is violated; backoff and try again.
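For the retry branch that follows: the sleep interval is the server-suggested retry-after value plus a quadratic backoff on the iteration count. A standalone sketch of just that arithmetic (hypothetical helper, not the crate's API):

use std::time::Duration;

// retry_after + i^2 seconds, where i is the current retry iteration; this is
// the same interval the loop below computes before sleeping.
fn backoff(retry_after_secs: u64, iteration: u8) -> Duration {
    Duration::from_secs(retry_after_secs + (iteration as u64).pow(2))
}

fn main() {
    assert_eq!(backoff(2, 0), Duration::from_secs(2));
    assert_eq!(backoff(2, 3), Duration::from_secs(11));
}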
- if retries > 4 { - panic!("REST API secondary rate limit exceeded"); - } - if let Some(retry) = response.headers().get(&rate_limit_headers.retry) { - let interval = Duration::from_secs( - retry - .to_str() - .expect("Failed to extract retry interval about rate limit") - .parse::() - .expect( - "Failed to parse u64 from retry interval about rate limit", - ) - + retries.pow(2), - ); - tokio::time::sleep(interval).await; - return Self::send_api_request( - client, - request, - strict, - rate_limit_headers, - retries + 1, - ) - .await; - } - } - let cached_response = CachedResponse::from(response).await; - if !cached_response.status.is_success() { - let summary = format!( - "Got {} response from {} request to {}:\n{}", - cached_response.status.as_u16(), - request.method().as_str(), - request.url().as_str(), - cached_response.text, - ); - if strict { - panic!("{summary}"); - } else { - log::error!("{summary}"); + if let Some(retry_value) = response.headers().get(&rate_limit_headers.retry) + { + if let Ok(retry_str) = retry_value.to_str() { + if let Ok(retry) = retry_str.parse::() { + let interval = Duration::from_secs(retry + (i as u64).pow(2)); + tokio::time::sleep(interval).await; + } else { + log::debug!( + "Failed to parse u64 from retry interval about rate limit: {retry_str}" + ); + } + } + continue; } } - Some(cached_response) - } - Err(e) => { - if strict { - panic!("Failed to complete the HTTP request.\n{}", e); - } else { - None - } + return result.map_err(Error::from); } + return result.map_err(Error::from); } + Err(anyhow!( + "REST API secondary rate limit exceeded after {MAX_RETRIES} retries." + )) } - .boxed() } /// A way to get the list of changed files using REST API calls. It is this method's @@ -231,7 +202,7 @@ pub trait RestApiClient { fn get_list_of_changed_files( &self, file_filter: &FileFilter, - ) -> impl Future>; + ) -> impl Future>>; /// A way to get the list of changed files using REST API calls that employ a paginated response. /// @@ -241,7 +212,7 @@ pub trait RestApiClient { &self, url: Url, file_filter: &FileFilter, - ) -> impl Future>; + ) -> impl Future>>; /// Makes a comment in MarkDown syntax based on the concerns in `format_advice` and /// `tidy_advice` about the given set of `files`. @@ -303,34 +274,44 @@ pub trait RestApiClient { &self, files: &[Arc>], user_inputs: FeedbackInput, - ) -> impl Future; + ) -> impl Future>; /// Gets the URL for the next page in a paginated response. /// /// Returns [`None`] if current response is the last page. 
fn try_next_page(headers: &HeaderMap) -> Option { - if headers.contains_key("link") { - let pages = headers["link"] - .to_str() - .expect("Failed to convert header value of links to a str") - .split(", "); - for page in pages { - if page.ends_with("; rel=\"next\"") { - let url = page - .split_once(">;") - .expect("header link for pagination is malformed") - .0 - .trim_start_matches("<") - .to_string(); - return Some( - Url::parse(&url) - .expect("Failed to parse next page link from response header"), - ); + if let Some(links) = headers.get("link") { + if let Ok(pg_str) = links.to_str() { + let pages = pg_str.split(", "); + for page in pages { + if page.ends_with("; rel=\"next\"") { + if let Some(link) = page.split_once(">;") { + let url = link.0.trim_start_matches("<").to_string(); + if let Ok(next) = Url::parse(&url) { + return Some(next); + } else { + log::debug!("Failed to parse next page link from response header"); + } + } else { + log::debug!("Response header link for pagination is malformed"); + } + } + } } } None } + + fn log_response(response: Response, context: &str) -> impl Future + Send { + async move { + if let Err(e) = response.error_for_status_ref() { + log::error!("{}: {e:?}", context); + if let Ok(text) = response.text().await { + log::error!("{text}"); + } + } + } + } } fn make_format_comment( @@ -390,7 +371,7 @@ fn make_tidy_comment( rationale = tidy_note.rationale, concerned_code = if tidy_note.suggestion.is_empty() {String::from("")} else { format!("\n ```{ext}\n {suggestion}\n ```\n", - ext = file_path.extension().expect("file extension was not determined").to_string_lossy(), + ext = file_path.extension().unwrap_or_default().to_string_lossy(), suggestion = tidy_note.suggestion.join("\n "), ).to_string() }, @@ -408,3 +389,259 @@ fn make_tidy_comment( comment.push_str(&tidy_comment); comment.push_str(&closer); } + +/// This module tests the debug logs emitted for silent errors +/// in the `try_next_page()` and `send_api_request()` functions.
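For reference while reading the tests that follow: the `rel="next"` extraction in `try_next_page()` above can be reproduced with plain string handling. A standalone sketch under that assumption:

// Extracts the rel="next" target from a GitHub-style `link` header value,
// mirroring the happy path of try_next_page() above.
fn next_page(link_header: &str) -> Option<String> {
    for page in link_header.split(", ") {
        if page.ends_with("; rel=\"next\"") {
            if let Some((url, _)) = page.split_once(">;") {
                return Some(url.trim_start_matches('<').to_string());
            }
        }
    }
    None
}

fn main() {
    let header = "<https://api.github.com/repos/octo/repo/pulls/1/files?page=2>; rel=\"next\"";
    assert_eq!(
        next_page(header).as_deref(),
        Some("https://api.github.com/repos/octo/repo/pulls/1/files?page=2")
    );
    assert_eq!(next_page("<x>; rel=\"prev\""), None);
}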
+#[cfg(test)] +mod test { + use std::sync::{Arc, Mutex}; + + use anyhow::{anyhow, Result}; + use chrono::Utc; + use mockito::{Matcher, Server}; + use reqwest::{ + header::{HeaderMap, HeaderValue}, + Client, + }; + use reqwest::{Method, Url}; + + use crate::{ + cli::FeedbackInput, + common_fs::{FileFilter, FileObj}, + logger, + }; + + use super::{RestApiClient, RestApiRateLimitHeaders}; + + /// A dummy struct to impl RestApiClient + #[derive(Default)] + struct TestClient {} + + impl RestApiClient for TestClient { + fn set_exit_code( + &self, + _checks_failed: u64, + _format_checks_failed: Option, + _tidy_checks_failed: Option, + ) -> u64 { + 0 + } + + fn make_headers() -> Result> { + Err(anyhow!("Not implemented")) + } + + async fn get_list_of_changed_files( + &self, + _file_filter: &FileFilter, + ) -> Result> { + Err(anyhow!("Not implemented")) + } + + async fn get_changed_files_paginated( + &self, + _url: reqwest::Url, + _file_filter: &FileFilter, + ) -> Result> { + Err(anyhow!("Not implemented")) + } + + async fn post_feedback( + &self, + _files: &[Arc>], + _user_inputs: FeedbackInput, + ) -> Result { + Err(anyhow!("Not implemented")) + } + } + + #[tokio::test] + async fn dummy_coverage() { + assert!(TestClient::make_headers().is_err()); + let dummy = TestClient::default(); + assert_eq!(dummy.set_exit_code(1, None, None), 0); + assert!(dummy + .get_list_of_changed_files(&FileFilter::new(&[], vec![])) + .await + .is_err()); + assert!(dummy + .get_changed_files_paginated( + Url::parse("https://example.net").unwrap(), + &FileFilter::new(&[], vec![]) + ) + .await + .is_err()); + assert!(dummy + .post_feedback(&[], FeedbackInput::default()) + .await + .is_err()) + } + + // ************************************************* try_next_page() tests + + #[test] + fn bad_link_header() { + let mut headers = HeaderMap::with_capacity(1); + assert!(headers + .insert("link", HeaderValue::from_str("; rel=\"next\"").unwrap()) + .is_none()); + logger::init().unwrap(); + log::set_max_level(log::LevelFilter::Debug); + let result = TestClient::try_next_page(&headers); + assert!(result.is_none()); + } + + #[test] + fn bad_link_domain() { + let mut headers = HeaderMap::with_capacity(1); + assert!(headers + .insert( + "link", + HeaderValue::from_str("; rel=\"next\"").unwrap() + ) + .is_none()); + logger::init().unwrap(); + log::set_max_level(log::LevelFilter::Debug); + let result = TestClient::try_next_page(&headers); + assert!(result.is_none()); + } + + // ************************************************* Rate Limit Tests + + #[derive(Default)] + struct RateLimitTestParams { + secondary: bool, + has_remaining_count: bool, + bad_remaining_count: bool, + has_reset_timestamp: bool, + bad_reset_timestamp: bool, + has_retry_interval: bool, + bad_retry_interval: bool, + } + + async fn simulate_rate_limit(test_params: &RateLimitTestParams) { + let rate_limit_headers = RestApiRateLimitHeaders { + reset: "reset".to_string(), + remaining: "remaining".to_string(), + retry: "retry".to_string(), + }; + logger::init().unwrap(); + log::set_max_level(log::LevelFilter::Debug); + + let mut server = Server::new_async().await; + let client = Client::new(); + let reset_timestamp = (Utc::now().timestamp() + 60).to_string(); + let mut mock = server + .mock("GET", "/") + .match_body(Matcher::Any) + .expect_at_least(1) + .expect_at_most(5) + .with_status(429); + if test_params.has_remaining_count { + mock = mock.with_header( + &rate_limit_headers.remaining, + if test_params.secondary { + "1" + } else if test_params.bad_remaining_count { 
+ "X" + } else { + "0" + }, + ); + } + if test_params.has_reset_timestamp { + mock = mock.with_header( + &rate_limit_headers.reset, + if test_params.bad_reset_timestamp { + "X" + } else { + &reset_timestamp + }, + ); + } + if test_params.secondary && test_params.has_retry_interval { + mock.with_header( + &rate_limit_headers.retry, + if test_params.bad_retry_interval { + "X" + } else { + "0" + }, + ) + .create(); + } else { + mock.create(); + } + let request = + TestClient::make_api_request(&client, server.url(), Method::GET, None, None).unwrap(); + TestClient::send_api_request(client.clone(), request, rate_limit_headers.clone(), 0) + .await + .unwrap(); + } + + #[tokio::test] + #[should_panic(expected = "REST API secondary rate limit exceeded")] + async fn rate_limit_secondary() { + simulate_rate_limit(&RateLimitTestParams { + secondary: true, + has_retry_interval: true, + ..Default::default() + }) + .await; + } + + #[tokio::test] + #[should_panic(expected = "REST API secondary rate limit exceeded")] + async fn rate_limit_bad_retry() { + simulate_rate_limit(&RateLimitTestParams { + secondary: true, + has_retry_interval: true, + bad_retry_interval: true, + ..Default::default() + }) + .await; + } + + #[tokio::test] + #[should_panic(expected = "REST API rate limit exceeded!")] + async fn rate_limit_primary() { + simulate_rate_limit(&RateLimitTestParams { + has_remaining_count: true, + has_reset_timestamp: true, + ..Default::default() + }) + .await; + } + + #[tokio::test] + #[should_panic(expected = "REST API rate limit exceeded!")] + async fn rate_limit_no_reset() { + simulate_rate_limit(&RateLimitTestParams { + has_remaining_count: true, + ..Default::default() + }) + .await; + } + + #[tokio::test] + #[should_panic(expected = "REST API rate limit exceeded!")] + async fn rate_limit_bad_reset() { + simulate_rate_limit(&RateLimitTestParams { + has_remaining_count: true, + has_reset_timestamp: true, + bad_reset_timestamp: true, + ..Default::default() + }) + .await; + } + + #[tokio::test] + async fn rate_limit_bad_count() { + simulate_rate_limit(&RateLimitTestParams { + has_remaining_count: true, + bad_remaining_count: true, + ..Default::default() + }) + .await; + } +} diff --git a/cpp-linter/src/run.rs b/cpp-linter/src/run.rs index c2bcb0c..42cc050 100644 --- a/cpp-linter/src/run.rs +++ b/cpp-linter/src/run.rs @@ -8,6 +8,7 @@ use std::path::Path; use std::sync::{Arc, Mutex}; // non-std crates +use anyhow::{anyhow, Result}; use log::{set_max_level, LevelFilter}; #[cfg(feature = "openssl-vendored")] use openssl_probe; @@ -21,14 +22,11 @@ use crate::rest_api::{github::GithubApiClient, RestApiClient}; const VERSION: &str = env!("CARGO_PKG_VERSION"); -#[cfg(feature = "openssl-vendored")] fn probe_ssl_certs() { + #[cfg(feature = "openssl-vendored")] openssl_probe::init_ssl_cert_env_vars(); } -#[cfg(not(feature = "openssl-vendored"))] -fn probe_ssl_certs() {} - /// This is the backend entry point for console applications. /// /// The idea here is that all functionality is implemented in Rust. However, passing @@ -43,7 +41,7 @@ fn probe_ssl_certs() {} /// is used instead of python's `sys.argv`, then the list of strings includes the entry point /// alias ("path/to/cpp-linter.exe"). Thus, the parser in [`crate::cli`] will halt on an error /// because it is not configured to handle positional arguments. 
-pub async fn run_main(args: Vec) -> i32 { +pub async fn run_main(args: Vec) -> Result<()> { probe_ssl_certs(); let arg_parser = get_arg_parser(); @@ -52,7 +50,7 @@ pub async fn run_main(args: Vec) -> i32 { if args.subcommand_matches("version").is_some() { println!("cpp-linter v{}", VERSION); - return 0; + return Ok(()); } logger::init().unwrap(); @@ -60,7 +58,7 @@ pub async fn run_main(args: Vec) -> i32 { if cli.version == "NO-VERSION" { log::error!("The `--version` arg is used to specify which version of clang to use."); log::error!("To get the cpp-linter version, use `cpp-linter version` sub-command."); - return 1; + return Err(anyhow!("Clang version not specified.")); } if cli.repo_root != "." { @@ -68,7 +66,7 @@ pub async fn run_main(args: Vec) -> i32 { .unwrap_or_else(|_| panic!("'{}' is inaccessible or does not exist", cli.repo_root)); } - let rest_api_client = GithubApiClient::new(); + let rest_api_client = GithubApiClient::new()?; set_max_level(if cli.verbosity || rest_api_client.debug_enabled { LevelFilter::Debug } else { @@ -100,14 +98,14 @@ pub async fn run_main(args: Vec) -> i32 { // parse_diff(github_rest_api_payload) rest_api_client .get_list_of_changed_files(&file_filter) - .await + .await? } else { // walk the folder and look for files with specified extensions according to ignore values. - let mut all_files = file_filter.list_source_files("."); + let mut all_files = file_filter.list_source_files(".")?; if rest_api_client.event_name == "pull_request" && (cli.tidy_review || cli.format_review) { let changed_files = rest_api_client .get_list_of_changed_files(&file_filter) - .await; + .await?; for changed_file in changed_files { for file in &mut all_files { if changed_file.name == file.name { @@ -130,14 +128,20 @@ pub async fn run_main(args: Vec) -> i32 { let mut clang_params = ClangParams::from(&cli); let user_inputs = FeedbackInput::from(&cli); - capture_clang_tools_output(&mut arc_files, cli.version.as_str(), &mut clang_params).await; + capture_clang_tools_output(&mut arc_files, cli.version.as_str(), &mut clang_params).await?; start_log_group(String::from("Posting feedback")); - let checks_failed = rest_api_client.post_feedback(&arc_files, user_inputs).await; + let checks_failed = rest_api_client + .post_feedback(&arc_files, user_inputs) + .await?; end_log_group(); if env::var("PRE_COMMIT").is_ok_and(|v| v == "1") { - return (checks_failed > 1) as i32; + if checks_failed > 1 { + return Err(anyhow!("Some checks did not pass")); + } else { + return Ok(()); + } } - 0 + Ok(()) } #[cfg(test)] @@ -146,59 +150,64 @@ mod test { use std::env; #[tokio::test] - async fn run() { + async fn normal() { + env::remove_var("GITHUB_OUTPUT"); // avoid writing to GH_OUT in parallel-running tests + let result = run_main(vec![ + "cpp-linter".to_string(), + "-l".to_string(), + "false".to_string(), + "--repo-root".to_string(), + "tests".to_string(), + "demo/demo.cpp".to_string(), + ]) + .await; + assert!(result.is_ok()); + } + + #[tokio::test] + async fn version_command() { env::remove_var("GITHUB_OUTPUT"); // avoid writing to GH_OUT in parallel-running tests - assert_eq!( - run_main(vec![ - "cpp-linter".to_string(), - "-l".to_string(), - "false".to_string(), - "--repo-root".to_string(), - "tests".to_string(), - "demo/demo.cpp".to_string(), - ]) - .await, - 0 - ); + let result = run_main(vec!["cpp-linter".to_string(), "version".to_string()]).await; + assert!(result.is_ok()); } #[tokio::test] - async fn run_version_command() { + async fn force_debug_output() { env::remove_var("GITHUB_OUTPUT"); // 
avoid writing to GH_OUT in parallel-running tests - assert_eq!( - run_main(vec!["cpp-linter".to_string(), "version".to_string()]).await, - 0 - ); + let result = run_main(vec![ + "cpp-linter".to_string(), + "-l".to_string(), + "false".to_string(), + "-v".to_string(), + "debug".to_string(), + ]) + .await; + assert!(result.is_ok()); } #[tokio::test] - async fn run_force_debug_output() { + async fn bad_version_input() { env::remove_var("GITHUB_OUTPUT"); // avoid writing to GH_OUT in parallel-running tests - assert_eq!( - run_main(vec![ - "cpp-linter".to_string(), - "-l".to_string(), - "false".to_string(), - "-v".to_string(), - "debug".to_string(), - ]) - .await, - 0 - ); + let result = run_main(vec![ + "cpp-linter".to_string(), + "-l".to_string(), + "false".to_string(), + "-V".to_string(), + ]) + .await; + assert!(result.is_err()); } #[tokio::test] - async fn run_bad_version_input() { + async fn pre_commit_env() { env::remove_var("GITHUB_OUTPUT"); // avoid writing to GH_OUT in parallel-running tests - assert_eq!( - run_main(vec![ - "cpp-linter".to_string(), - "-l".to_string(), - "false".to_string(), - "-V".to_string() - ]) - .await, - 1 - ); + env::set_var("PRE_COMMIT", "1"); + let result = run_main(vec![ + "cpp-linter".to_string(), + "-l".to_string(), + "false".to_string(), + ]) + .await; + assert!(result.is_err()); } } diff --git a/cpp-linter/tests/comments.rs b/cpp-linter/tests/comments.rs index e4856b9..99cc46c 100644 --- a/cpp-linter/tests/comments.rs +++ b/cpp-linter/tests/comments.rs @@ -42,6 +42,7 @@ struct TestParams { pub fail_get_existing_comments: bool, pub fail_dismissal: bool, pub fail_posting: bool, + pub bad_existing_comments: bool, } impl Default for TestParams { @@ -55,6 +56,7 @@ impl Default for TestParams { fail_get_existing_comments: false, fail_dismissal: false, fail_posting: false, + bad_existing_comments: false, } } } @@ -94,7 +96,7 @@ async fn setup(lib_root: &Path, test_params: &TestParams) { server .mock("GET", format!("/repos/{REPO}/{diff_end_point}").as_str()) .match_header("Accept", "application/vnd.github.diff") - .match_header("Authorization", TOKEN) + .match_header("Authorization", format!("token {TOKEN}").as_str()) .with_body_from_file(format!("{asset_path}patch.diff")) .with_header(REMAINING_RATE_LIMIT_HEADER, "50") .with_header(RESET_RATE_LIMIT_HEADER, reset_timestamp.as_str()) @@ -102,28 +104,31 @@ async fn setup(lib_root: &Path, test_params: &TestParams) { ); } if test_params.event_t == EventType::Push { - mocks.push( - server - .mock( - "GET", - format!("/repos/{REPO}/commits/{SHA}/comments/").as_str(), - ) - .match_header("Accept", "application/vnd.github.raw+json") - .match_header("Authorization", TOKEN) - .match_body(Matcher::Any) - .match_query(Matcher::UrlEncoded("page".to_string(), "1".to_string())) - .with_body_from_file(format!("{asset_path}push_comments_{SHA}.json")) - .with_header(REMAINING_RATE_LIMIT_HEADER, "50") - .with_header(RESET_RATE_LIMIT_HEADER, reset_timestamp.as_str()) - .with_status(if test_params.fail_get_existing_comments { - 403 - } else { - 200 - }) - .create(), - ); + let mut mock = server + .mock( + "GET", + format!("/repos/{REPO}/commits/{SHA}/comments").as_str(), + ) + .match_header("Accept", "application/vnd.github.raw+json") + .match_header("Authorization", format!("token {TOKEN}").as_str()) + .match_body(Matcher::Any) + .match_query(Matcher::UrlEncoded("page".to_string(), "1".to_string())) + .with_header(REMAINING_RATE_LIMIT_HEADER, "50") + .with_header(RESET_RATE_LIMIT_HEADER, reset_timestamp.as_str()) + .with_status(if 
test_params.fail_get_existing_comments { + 403 + } else { + 200 + }); + if test_params.bad_existing_comments { + mock = mock.with_body(String::new()); + } else { + mock = mock.with_body_from_file(format!("{asset_path}push_comments_{SHA}.json")); + } + mock = mock.create(); + mocks.push(mock); } else { - let pr_endpoint = format!("/repos/{REPO}/issues/{PR}/comments/"); + let pr_endpoint = format!("/repos/{REPO}/issues/{PR}/comments"); for pg in ["1", "2"] { let link = if pg == "1" { format!("<{}{pr_endpoint}?page=2>; rel=\"next\"", server.url()) @@ -134,7 +139,7 @@ async fn setup(lib_root: &Path, test_params: &TestParams) { server .mock("GET", pr_endpoint.as_str()) .match_header("Accept", "application/vnd.github.raw+json") - .match_header("Authorization", TOKEN) + .match_header("Authorization", format!("token {TOKEN}").as_str()) .match_body(Matcher::Any) .match_query(Matcher::UrlEncoded("page".to_string(), pg.to_string())) .with_body_from_file(format!("{asset_path}pr_comments_pg{pg}.json")) @@ -146,13 +151,21 @@ async fn setup(lib_root: &Path, test_params: &TestParams) { ); } } - let comment_url = format!("/repos/{REPO}/comments/76453652"); + let comment_url = format!( + "/repos/{REPO}{}/comments/76453652", + if test_params.event_t == EventType::PullRequest { + "/issues" + } else { + "" + } + ); - if !test_params.fail_get_existing_comments { + if !test_params.fail_get_existing_comments && !test_params.bad_existing_comments { mocks.push( server .mock("DELETE", comment_url.as_str()) .match_body(Matcher::Any) + .match_header("Authorization", format!("token {TOKEN}").as_str()) .with_status(if test_params.fail_dismissal { 403 } else { 200 }) .with_header(REMAINING_RATE_LIMIT_HEADER, "50") .with_header(RESET_RATE_LIMIT_HEADER, reset_timestamp.as_str()) @@ -177,6 +190,7 @@ async fn setup(lib_root: &Path, test_params: &TestParams) { server .mock("PATCH", comment_url.as_str()) .match_body(new_comment_match.clone()) + .match_header("Authorization", format!("token {TOKEN}").as_str()) .with_status(if test_params.fail_posting { 403 } else { 200 }) .with_header(REMAINING_RATE_LIMIT_HEADER, "50") .with_header(RESET_RATE_LIMIT_HEADER, reset_timestamp.as_str()) @@ -184,13 +198,16 @@ async fn setup(lib_root: &Path, test_params: &TestParams) { ); } - if test_params.thread_comments == ThreadComments::On && lgtm_allowed { + if test_params.thread_comments == ThreadComments::On + && lgtm_allowed + && !test_params.bad_existing_comments + { mocks.push( server .mock( "POST", format!( - "/repos/{REPO}/{}/comments/", + "/repos/{REPO}/{}/comments", if test_params.event_t == EventType::PullRequest { format!("issues/{PR}") } else { @@ -200,6 +217,7 @@ async fn setup(lib_root: &Path, test_params: &TestParams) { .as_str(), ) .match_body(new_comment_match) + .match_header("Authorization", format!("token {TOKEN}").as_str()) .with_header(REMAINING_RATE_LIMIT_HEADER, "50") .with_header(RESET_RATE_LIMIT_HEADER, reset_timestamp.as_str()) .with_status(if test_params.fail_posting { 403 } else { 200 }) @@ -223,7 +241,7 @@ async fn setup(lib_root: &Path, test_params: &TestParams) { args.push("-e=c".to_string()); } let result = run_main(args).await; - assert_eq!(result, 0); + assert!(result.is_ok()); for mock in mocks { mock.assert(); } @@ -395,3 +413,13 @@ async fn fail_posting() { }) .await; } + +#[tokio::test] +async fn bad_existing_comments() { + test_comment(&TestParams { + bad_existing_comments: true, + force_lgtm: true, + ..Default::default() + }) + .await; +} diff --git a/cpp-linter/tests/paginated_changed_files.rs 
b/cpp-linter/tests/paginated_changed_files.rs index 32d8abd..682e170 100644 --- a/cpp-linter/tests/paginated_changed_files.rs +++ b/cpp-linter/tests/paginated_changed_files.rs @@ -6,30 +6,43 @@ use tempfile::{NamedTempFile, TempDir}; use cpp_linter::{ common_fs::FileFilter, + logger, rest_api::{github::GithubApiClient, RestApiClient}, }; use std::{env, io::Write, path::Path}; -#[derive(PartialEq)] +#[derive(PartialEq, Default)] enum EventType { + #[default] Push, - PullRequest(u64), + PullRequest, +} + +#[derive(Default)] +struct TestParams { + event_t: EventType, + fail_serde_diff: bool, + fail_serde_event_payload: bool, + no_event_payload: bool, } const REPO: &str = "cpp-linter/test-cpp-linter-action"; const SHA: &str = "DEADBEEF"; +const PR: u8 = 42; const TOKEN: &str = "123456"; +const EVENT_PAYLOAD: &str = r#"{"number": 42}"#; const RESET_RATE_LIMIT_HEADER: &str = "x-ratelimit-reset"; const REMAINING_RATE_LIMIT_HEADER: &str = "x-ratelimit-remaining"; +const MALFORMED_RESPONSE_PAYLOAD: &str = "{\"message\":\"Resource not accessible by integration\"}"; -async fn get_paginated_changes(lib_root: &Path, event_type: EventType) { +async fn get_paginated_changes(lib_root: &Path, test_params: &TestParams) { env::set_var("GITHUB_REPOSITORY", REPO); env::set_var("GITHUB_SHA", SHA); env::set_var("GITHUB_TOKEN", TOKEN); env::set_var("CI", "true"); env::set_var( "GITHUB_EVENT_NAME", - if event_type == EventType::Push { + if test_params.event_t == EventType::Push { "push" } else { "pull_request" @@ -38,14 +51,20 @@ async fn get_paginated_changes(lib_root: &Path, event_type: EventType) { let tmp = TempDir::new().expect("Failed to create a temp dir for test"); let mut event_payload = NamedTempFile::new_in(tmp.path()) .expect("Failed to spawn a tmp file for test event payload"); - env::set_var("GITHUB_EVENT_PATH", event_payload.path()); - if let EventType::PullRequest(pr_number) = event_type { + env::set_var( + "GITHUB_EVENT_PATH", + if test_params.no_event_payload { + Path::new("not a file.txt") + } else { + event_payload.path() + }, + ); + if EventType::PullRequest == test_params.event_t + && !test_params.fail_serde_event_payload + && !test_params.no_event_payload + { event_payload - .write_all( - serde_json::json!({"number": pr_number}) - .to_string() - .as_bytes(), - ) + .write_all(EVENT_PAYLOAD.as_bytes()) .expect("Failed to write data to test event payload file") } @@ -55,13 +74,20 @@ let mut server = mock_server().await; env::set_var("GITHUB_API_URL", server.url()); env::set_current_dir(tmp.path()).unwrap(); + logger::init().unwrap(); + log::set_max_level(log::LevelFilter::Debug); let gh_client = GithubApiClient::new(); + if test_params.fail_serde_event_payload || test_params.no_event_payload { + assert!(gh_client.is_err()); + return; + } + let client = gh_client.unwrap(); let mut mocks = vec![]; let diff_end_point = format!( "/repos/{REPO}/{}", - if let EventType::PullRequest(pr) = event_type { - format!("pulls/{pr}") + if EventType::PullRequest == test_params.event_t { + format!("pulls/{PR}") } else { format!("commits/{SHA}") } ); mocks.push( server .mock("GET", diff_end_point.as_str()) .match_header("Accept", "application/vnd.github.diff") - .match_header("Authorization", TOKEN) + .match_header("Authorization", format!("token {TOKEN}").as_str()) .with_header(REMAINING_RATE_LIMIT_HEADER, "50") .with_header(RESET_RATE_LIMIT_HEADER,
reset_timestamp.as_str()) .with_status(403) .create(), ); - let pg_end_point = if event_type == EventType::Push { + let pg_end_point = if test_params.event_t == EventType::Push { diff_end_point.clone() } else { format!("{diff_end_point}/files") }; - for pg in 1..=2 { + let pg_count = if test_params.fail_serde_diff { 1 } else { 2 }; + for pg in 1..=pg_count { let link = if pg == 1 { format!("<{}{pg_end_point}?page=2>; rel=\"next\"", server.url()) } else { "".to_string() }; - mocks.push( - server - .mock("GET", pg_end_point.as_str()) - .match_header("Accept", "application/vnd.github.raw+json") - .match_header("Authorization", TOKEN) - .match_query(Matcher::UrlEncoded("page".to_string(), pg.to_string())) - .with_header(REMAINING_RATE_LIMIT_HEADER, "50") - .with_header(RESET_RATE_LIMIT_HEADER, reset_timestamp.as_str()) - .with_body_from_file(format!( - "{asset_path}/{}_files_pg{pg}.json", - if event_type == EventType::Push { - "push" - } else { - "pull_request" - } - )) - .with_header("link", link.as_str()) - .create(), - ); + let mut mock = server + .mock("GET", pg_end_point.as_str()) + .match_header("Accept", "application/vnd.github.raw+json") + .match_header("Authorization", format!("token {TOKEN}").as_str()) + .match_query(Matcher::UrlEncoded("page".to_string(), pg.to_string())) + .with_header(REMAINING_RATE_LIMIT_HEADER, "50") + .with_header(RESET_RATE_LIMIT_HEADER, reset_timestamp.as_str()) + .with_header("link", link.as_str()); + if test_params.fail_serde_diff { + mock = mock.with_body(MALFORMED_RESPONSE_PAYLOAD); + } else { + mock = mock.with_body_from_file(format!( + "{asset_path}/{}_files_pg{pg}.json", + if test_params.event_t == EventType::Push { + "push" + } else { + "pull_request" + } + )); + } + mocks.push(mock.create()); } let file_filter = FileFilter::new(&[], vec!["cpp".to_string(), "hpp".to_string()]); - let files = gh_client.get_list_of_changed_files(&file_filter).await; - assert_eq!(files.len(), 2); - for file in files { - assert!(["src/demo.cpp", "src/demo.hpp"].contains( - &file - .name - .as_path() - .to_str() - .expect("Failed to get file name from path") - )); + let files = client.get_list_of_changed_files(&file_filter).await; + match files { + Err(e) => { + if !test_params.fail_serde_diff { + panic!("Failed to get changed files: {e:?}"); + } + } + Ok(files) => { + assert_eq!(files.len(), 2); + for file in files { + assert!(["src/demo.cpp", "src/demo.hpp"].contains( + &file + .name + .as_path() + .to_str() + .expect("Failed to get file name from path") + )); + } + } } for mock in mocks { mock.assert(); } } -async fn test_get_changes(event_type: EventType) { +async fn test_get_changes(test_params: &TestParams) { let tmp_dir = create_test_space(false); let lib_root = env::current_dir().unwrap(); env::set_current_dir(tmp_dir.path()).unwrap(); - get_paginated_changes(&lib_root, event_type).await; + get_paginated_changes(&lib_root, test_params).await; env::set_current_dir(lib_root.as_path()).unwrap(); drop(tmp_dir); } #[tokio::test] async fn get_push_files_paginated() { - test_get_changes(EventType::Push).await + test_get_changes(&TestParams::default()).await } #[tokio::test] async fn get_pr_files_paginated() { - test_get_changes(EventType::PullRequest(42)).await + test_get_changes(&TestParams { + event_t: EventType::PullRequest, + ..Default::default() + }) + .await +} + +#[tokio::test] +async fn fail_push_files_paginated() { + test_get_changes(&TestParams { + fail_serde_diff: true, + ..Default::default() + }) + .await +} + +#[tokio::test] +async fn 
fail_pr_files_paginated() { + test_get_changes(&TestParams { + event_t: EventType::PullRequest, + fail_serde_diff: true, + ..Default::default() + }) + .await +} + +#[tokio::test] +async fn fail_event_payload() { + test_get_changes(&TestParams { + event_t: EventType::PullRequest, + fail_serde_event_payload: true, + ..Default::default() + }) + .await +} + +#[tokio::test] +async fn no_event_payload() { + test_get_changes(&TestParams { + event_t: EventType::PullRequest, + no_event_payload: true, + ..Default::default() + }) + .await } diff --git a/cpp-linter/tests/reviews.rs b/cpp-linter/tests/reviews.rs index 9d8a2fe..a949f6c 100644 --- a/cpp-linter/tests/reviews.rs +++ b/cpp-linter/tests/reviews.rs @@ -35,6 +35,8 @@ struct TestParams { pub fail_dismissal: bool, pub fail_get_existing_reviews: bool, pub fail_posting: bool, + pub bad_pr_info: bool, + pub bad_existing_reviews: bool, } impl Default for TestParams { @@ -52,6 +54,8 @@ impl Default for TestParams { fail_dismissal: false, fail_get_existing_reviews: false, fail_posting: false, + bad_pr_info: false, + bad_existing_reviews: false, } } } @@ -84,7 +88,7 @@ async fn setup(lib_root: &Path, test_params: &TestParams) { server .mock("GET", pr_endpoint.as_str()) .match_header("Accept", "application/vnd.github.diff") - .match_header("Authorization", TOKEN) + .match_header("Authorization", format!("token {TOKEN}").as_str()) .with_body_from_file(format!("{asset_path}pr_{PR}.diff")) .with_header(REMAINING_RATE_LIMIT_HEADER, "50") .with_header(RESET_RATE_LIMIT_HEADER, reset_timestamp.as_str()) @@ -94,10 +98,12 @@ async fn setup(lib_root: &Path, test_params: &TestParams) { server .mock("GET", pr_endpoint.as_str()) .match_header("Accept", "application/vnd.github.raw+json") - .match_header("Authorization", TOKEN) - .with_body( - json!({"state": test_params.pr_state, "draft": test_params.pr_draft}).to_string(), - ) + .match_header("Authorization", format!("token {TOKEN}").as_str()) + .with_body(if test_params.bad_pr_info { + String::new() + } else { + json!({"state": test_params.pr_state, "draft": test_params.pr_draft}).to_string() + }) .with_header(REMAINING_RATE_LIMIT_HEADER, "50") .with_header(RESET_RATE_LIMIT_HEADER, reset_timestamp.as_str()) .create(), @@ -105,28 +111,36 @@ async fn setup(lib_root: &Path, test_params: &TestParams) { let reviews_endpoint = format!("/repos/{REPO}/pulls/{PR}/reviews"); - mocks.push( - server - .mock("GET", reviews_endpoint.as_str()) - .match_header("Accept", "application/vnd.github.raw+json") - .match_header("Authorization", TOKEN) - .match_body(Matcher::Any) - .match_query(Matcher::UrlEncoded("page".to_string(), "1".to_string())) + let mut mock = server + .mock("GET", reviews_endpoint.as_str()) + .match_header("Accept", "application/vnd.github.raw+json") + .match_header("Authorization", format!("token {TOKEN}").as_str()) + .match_body(Matcher::Any) + .match_query(Matcher::UrlEncoded("page".to_string(), "1".to_string())) + .with_header(REMAINING_RATE_LIMIT_HEADER, "50") + .with_header(RESET_RATE_LIMIT_HEADER, reset_timestamp.as_str()) + .with_status(if test_params.fail_get_existing_reviews { + 403 + } else { + 200 + }); + if test_params.bad_existing_reviews { + mock = mock.with_body(String::new()).create(); + } else { + mock = mock .with_body_from_file(format!("{asset_path}pr_reviews.json")) - .with_header(REMAINING_RATE_LIMIT_HEADER, "50") - .with_header(RESET_RATE_LIMIT_HEADER, reset_timestamp.as_str()) - .with_status(if test_params.fail_get_existing_reviews { - 403 - } else { - 200 - }) - .create(), - ); - if 
!test_params.fail_get_existing_reviews { + .create() + } + mocks.push(mock); + if !test_params.fail_get_existing_reviews && !test_params.bad_existing_reviews { mocks.push( server - .mock("PUT", format!("{reviews_endpoint}/1807607546").as_str()) + .mock( + "PUT", + format!("{reviews_endpoint}/1807607546/dismissals").as_str(), + ) .match_body(r#"{"event":"DISMISS","message":"outdated suggestion"}"#) + .match_header("Authorization", format!("token {TOKEN}").as_str()) .with_header(REMAINING_RATE_LIMIT_HEADER, "50") .with_header(RESET_RATE_LIMIT_HEADER, reset_timestamp.as_str()) .with_status(if test_params.fail_dismissal { 403 } else { 200 }) @@ -135,7 +149,11 @@ async fn setup(lib_root: &Path, test_params: &TestParams) { } let lgtm_allowed = !test_params.force_lgtm || !test_params.no_lgtm; - if lgtm_allowed && !test_params.pr_draft && test_params.pr_state == "open" { + if lgtm_allowed + && !test_params.pr_draft + && test_params.pr_state == "open" + && !test_params.bad_pr_info + { let review_reaction = if test_params.passive_reviews { "COMMENT" } else if test_params.force_lgtm { @@ -170,6 +188,7 @@ async fn setup(lib_root: &Path, test_params: &TestParams) { server .mock("POST", reviews_endpoint.as_str()) .match_body(Matcher::Regex(expected_review_payload)) + .match_header("Authorization", format!("token {TOKEN}").as_str()) .with_header(REMAINING_RATE_LIMIT_HEADER, "50") .with_header(RESET_RATE_LIMIT_HEADER, reset_timestamp.as_str()) .with_status(if test_params.fail_posting { 403 } else { 200 }) @@ -203,7 +222,7 @@ async fn setup(lib_root: &Path, test_params: &TestParams) { args.push("-e=c".to_string()); } let result = run_main(args).await; - assert_eq!(result, 0); + assert!(result.is_ok()); for mock in mocks { mock.assert(); } @@ -341,3 +360,25 @@ async fn fail_get_existing_reviews() { }) .await; } + +#[tokio::test] +async fn bad_existing_reviews() { + test_review(&TestParams { + lines_changed_only: LinesChangedOnly::Off, + force_lgtm: true, + bad_existing_reviews: true, + ..Default::default() + }) + .await; +} + +#[tokio::test] +async fn bad_pr_info() { + test_review(&TestParams { + lines_changed_only: LinesChangedOnly::Off, + force_lgtm: true, + bad_pr_info: true, + ..Default::default() + }) + .await; +} diff --git a/cspell.config.yml b/cspell.config.yml index d5a1fc0..800dd06 100644 --- a/cspell.config.yml +++ b/cspell.config.yml @@ -1,6 +1,7 @@ version: "0.2" language: en words: + - bindgen - binstall - bndy - Boolish @@ -10,6 +11,7 @@ words: - consts - cppcoreguidelines - cstdio + - cstdlib - Doherty - endfor - endgroup @@ -18,6 +20,7 @@ words: - Falsey - gitmodules - iomanip + - libc - libgit - markdownlint - maturin diff --git a/justfile b/justfile index 1d0cfef..000d796 100644 --- a/justfile +++ b/justfile @@ -1,11 +1,12 @@ set windows-shell := ["powershell.exe", "-NoLogo", "-Command"] + # run the test suite [group("code coverage")] -test arg='': +test profile='default': cargo llvm-cov --no-report \ nextest --manifest-path cpp-linter/Cargo.toml \ - --lib --tests --color always {{ arg }} + --lib --tests --color always --profile {{ profile }} # Clear previous test build artifacts [group("code coverage")] diff --git a/node-binding/Cargo.toml b/node-binding/Cargo.toml index d52fa46..bad3332 100644 --- a/node-binding/Cargo.toml +++ b/node-binding/Cargo.toml @@ -19,6 +19,7 @@ crate-type = ["cdylib"] napi = { version = "2.12.2", default-features = false, features = ["napi4", "async"] } napi-derive = "2.12.2" cpp-linter = { path = "../cpp-linter" } +anyhow = "1.0.89" [features] openssl-vendored 
= ["cpp-linter/openssl-vendored"] diff --git a/node-binding/src/lib.rs b/node-binding/src/lib.rs index 3cb83c0..b6bd5ca 100644 --- a/node-binding/src/lib.rs +++ b/node-binding/src/lib.rs @@ -1,8 +1,11 @@ #[macro_use] extern crate napi_derive; use ::cpp_linter::run::run_main; +use napi::bindgen_prelude::*; #[napi] -pub async fn main(args: Vec) -> i32 { - run_main(args).await +pub async fn main(args: Vec) -> Result<()> { + run_main(args) + .await + .map_err(|e| Error::new(Status::GenericFailure, e.to_string())) } diff --git a/py-binding/src/lib.rs b/py-binding/src/lib.rs index dbe62a0..a3538b0 100644 --- a/py-binding/src/lib.rs +++ b/py-binding/src/lib.rs @@ -1,16 +1,17 @@ -use pyo3::prelude::*; +use pyo3::{exceptions::PyOSError, prelude::*}; use tokio::runtime::Builder; use ::cpp_linter::run::run_main; /// A wrapper for the ``::cpp_linter::run::run_main()``` #[pyfunction] -fn main(args: Vec) -> PyResult { +fn main(args: Vec) -> PyResult<()> { Builder::new_multi_thread() .enable_all() .build() .unwrap() - .block_on(async { Ok(run_main(args).await) }) + .block_on(async { run_main(args).await }) + .map_err(|e| PyOSError::new_err(e.to_string())) } /// The python binding for the cpp_linter package. It only exposes a ``main()`` function