From 26ba400872e9b8e1f5ccde0acdba337aaff0ffc3 Mon Sep 17 00:00:00 2001 From: Ben Selwyn-Smith Date: Tue, 19 Aug 2025 13:22:26 +1000 Subject: [PATCH 1/3] feat: add local artifact support Signed-off-by: Ben Selwyn-Smith --- src/macaron/__main__.py | 87 ++++++++++++++---- src/macaron/artifact/local_artifact.py | 57 ++++++++---- src/macaron/config/global_config.py | 3 + src/macaron/slsa_analyzer/analyzer.py | 90 ++++++++++++++----- .../maven_central_registry.py | 23 ++--- tests/artifact/test_local_artifact.py | 23 ++++- .../failure_policy.dl | 10 +++ .../success_policy.dl | 10 +++ .../github_maven_artifact_local/test.yaml | 53 +++++++++++ 9 files changed, 281 insertions(+), 75 deletions(-) create mode 100644 tests/integration/cases/github_maven_artifact_local/failure_policy.dl create mode 100644 tests/integration/cases/github_maven_artifact_local/success_policy.dl create mode 100644 tests/integration/cases/github_maven_artifact_local/test.yaml diff --git a/src/macaron/__main__.py b/src/macaron/__main__.py index 9b746806e..ca0cd5f78 100644 --- a/src/macaron/__main__.py +++ b/src/macaron/__main__.py @@ -9,6 +9,7 @@ import os import sys from importlib import metadata as importlib_metadata +from typing import Any from jinja2 import Environment, FileSystemLoader, select_autoescape from packageurl import PackageURL @@ -96,6 +97,12 @@ def analyze_slsa_levels_single(analyzer_single_args: argparse.Namespace) -> None global_config.local_maven_repo = user_provided_local_maven_repo + # Set local artifact path. + if analyzer_single_args.local_artifact_path is not None and os.path.isfile( + analyzer_single_args.local_artifact_path + ): + global_config.local_artifact_path = analyzer_single_args.local_artifact_path + analyzer = Analyzer(global_config.output_path, global_config.build_log_path) # Initiate reporters. @@ -118,7 +125,6 @@ def analyze_slsa_levels_single(analyzer_single_args: argparse.Namespace) -> None analyzer.reporters.append(HTMLReporter()) analyzer.reporters.append(JSONReporter()) - run_config = {} repo_path = analyzer_single_args.repo_path purl = analyzer_single_args.package_url branch = analyzer_single_args.branch @@ -370,8 +376,25 @@ def main(argv: list[str] | None = None) -> None: # Add sub parsers for each action. sub_parser = main_parser.add_subparsers(dest="action", help="Run macaron --help for help") + # Dump the default values. + sub_parser.add_parser(name="dump-defaults", description="Dumps the defaults.ini file to the output directory.") + + # Add the sub parser commands. + _add_analyzer_parser(sub_parser) + _add_verify_policy_parser(sub_parser) + _add_find_source_parser(sub_parser) + + # Perform parsing. + args = parse_arguments(main_parser, argv) + + # Perform actions. + perform_action(args) + + +def _add_analyzer_parser(parser: Any) -> None: + """Add the analyzer commands to the parser.""" # Use Macaron to analyze one single repository. - single_analyze_parser = sub_parser.add_parser(name="analyze") + single_analyze_parser = parser.add_parser(name="analyze") single_analyze_parser.add_argument( "-sbom", @@ -390,7 +413,7 @@ def main(argv: list[str] | None = None) -> None: "--repo-path", required=False, type=str, - help=("The path to the repository, can be local or remote"), + help="The path to the repository, can be local or remote", ) single_analyze_parser.add_argument( @@ -411,7 +434,7 @@ def main(argv: list[str] | None = None) -> None: required=False, type=str, default="", - help=("The branch of the repository that we want to checkout. 
If not set, Macaron will use the default branch"), + help="The branch of the repository that we want to checkout. If not set, Macaron will use the default branch", ) single_analyze_parser.add_argument( @@ -430,14 +453,14 @@ def main(argv: list[str] | None = None) -> None: "-pe", "--provenance-expectation", required=False, - help=("The path to provenance expectation file or directory."), + help="The path to provenance expectation file or directory.", ) single_analyze_parser.add_argument( "-pf", "--provenance-file", required=False, - help=("The path to the provenance file in in-toto format."), + help="The path to the provenance file in in-toto format.", ) single_analyze_parser.add_argument( @@ -456,7 +479,7 @@ def main(argv: list[str] | None = None) -> None: required=False, type=str, default="", - help=("The path to the Jinja2 html template (please make sure to use .html or .j2 extensions)."), + help="The path to the Jinja2 html template (please make sure to use .html or .j2 extensions).", ) single_analyze_parser.add_argument( @@ -472,7 +495,8 @@ def main(argv: list[str] | None = None) -> None: "--local-maven-repo", required=False, help=( - "The path to the local .m2 directory. If this option is not used, Macaron will use the default location at $HOME/.m2" + "The path to the local .m2 directory. " + "If this option is not used, Macaron will use the default location at $HOME/.m2" ), ) @@ -480,36 +504,47 @@ def main(argv: list[str] | None = None) -> None: "--force-analyze-source", required=False, action="store_true", - help=("Forces PyPI sourcecode analysis to run regardless of other heuristic results."), + help="Forces PyPI sourcecode analysis to run regardless of other heuristic results.", ) single_analyze_parser.add_argument( "--verify-provenance", required=False, action="store_true", - help=("Allow the analysis to attempt to verify provenance files as part of its normal operations."), + help="Allow the analysis to attempt to verify provenance files as part of its normal operations.", ) - # Dump the default values. - sub_parser.add_parser(name="dump-defaults", description="Dumps the defaults.ini file to the output directory.") + single_analyze_parser.add_argument( + "-ap", + "--local-artifact-path", + required=False, + type=str, + help="The path to the local artifact file that should match the target software component being analyzed.", + ) + +def _add_verify_policy_parser(parser: Any) -> None: + """Add the verify policy commands parser.""" # Verify the Datalog policy. - vp_parser = sub_parser.add_parser(name="verify-policy") + vp_parser = parser.add_parser(name="verify-policy") vp_group = vp_parser.add_mutually_exclusive_group(required=True) vp_parser.add_argument("-d", "--database", required=True, type=str, help="Path to the database.") vp_group.add_argument("-f", "--file", type=str, help="Path to the Datalog policy.") vp_group.add_argument("-s", "--show-prelude", action="store_true", help="Show policy prelude.") + +def _add_find_source_parser(parser: Any) -> None: + """Add the find source commands parser.""" # Find the repo and commit of a passed PURL, or the commit of a passed PURL and repo. 
- find_parser = sub_parser.add_parser(name="find-source") + find_parser = parser.add_parser(name="find-source") find_parser.add_argument( "-purl", "--package-url", required=True, type=str, - help=("The PURL string to perform repository and commit finding for."), + help="The PURL string to perform repository and commit finding for.", ) find_parser.add_argument( @@ -523,10 +558,26 @@ def main(argv: list[str] | None = None) -> None: ), ) - args = main_parser.parse_args(argv) + +def parse_arguments(parser: argparse.ArgumentParser, argv: list[str] | None) -> argparse.Namespace: + """Parse the arguments of the argument parser. + + Parameters + ---------- + parser: argparse.ArgumentParser + The parser to use. + argv: list[str] + The list of arguments for the parser to parse. + + Returns + ------- + argparse.Namespace + The results of the argument parsing. + """ + args = parser.parse_args(argv) if not args.action: - main_parser.print_help() + parser.print_help() sys.exit(os.EX_USAGE) if args.verbose: @@ -587,7 +638,7 @@ def main(argv: list[str] | None = None) -> None: logger.error("Exiting because the defaults configuration could not be loaded.") sys.exit(os.EX_NOINPUT) - perform_action(args) + return args def _get_token_from_dict_or_env(token: str, token_dict: dict[str, str]) -> str: diff --git a/src/macaron/artifact/local_artifact.py b/src/macaron/artifact/local_artifact.py index 0df7e6248..8f3588b96 100644 --- a/src/macaron/artifact/local_artifact.py +++ b/src/macaron/artifact/local_artifact.py @@ -253,8 +253,33 @@ def get_local_artifact_dirs( raise LocalArtifactFinderError(f"Unsupported PURL type {purl_type}") -def get_local_artifact_hash(purl: PackageURL, artifact_dirs: list[str]) -> str | None: - """Compute the hash of the local artifact. +def get_artifact_hash_from_file(artifact_path: str) -> str | None: + """Compute the hash of the passed artifact. + + Parameters + ---------- + artifact_path: str + The path to the artifact. + + Returns + ------- + str | None + The artifact hash, or None if it could not be computed. + """ + if not os.path.isfile(artifact_path): + return None + + with open(artifact_path, "rb") as file: + try: + hash_result = hashlib.file_digest(file, "sha256") + return hash_result.hexdigest() + except ValueError as error: + logger.debug("Error while hashing file: %s", error) + return None + + +def get_artifact_hash_from_directory(purl: PackageURL, artifact_dirs: list[str]) -> tuple[str | None, str | None]: + """Compute the hash of a local artifact found within the passed directories. Parameters ---------- @@ -265,16 +290,16 @@ def get_local_artifact_hash(purl: PackageURL, artifact_dirs: list[str]) -> str | Returns ------- - str | None - The hash, or None if not found. + tuple[str | None, str | None] + The hash of, and path to, the artifact; or None if no artifact can be found locally or remotely. 
""" if not artifact_dirs: logger.debug("No artifact directories provided.") - return None + return None, None if not purl.version: logger.debug("PURL is missing version.") - return None + return None, None artifact_target = None if purl.type == "maven": @@ -286,20 +311,14 @@ def get_local_artifact_hash(purl: PackageURL, artifact_dirs: list[str]) -> str | if not artifact_target: logger.debug("PURL type not supported: %s", purl.type) - return None + return None, None for artifact_dir in artifact_dirs: - full_path = os.path.join(artifact_dir, artifact_target) - if not os.path.exists(full_path): + if not os.path.isdir(artifact_dir): continue + possible_artifact_path = os.path.join(artifact_dir, artifact_target) + artifact_hash = get_artifact_hash_from_file(possible_artifact_path) + if artifact_hash: + return artifact_hash, possible_artifact_path - with open(full_path, "rb") as file: - try: - hash_result = hashlib.file_digest(file, "sha256") - except ValueError as error: - logger.debug("Error while hashing file: %s", error) - continue - - return hash_result.hexdigest() - - return None + return None, None diff --git a/src/macaron/config/global_config.py b/src/macaron/config/global_config.py index 4e2befa6f..b57bb6680 100644 --- a/src/macaron/config/global_config.py +++ b/src/macaron/config/global_config.py @@ -49,6 +49,9 @@ class GlobalConfig: #: The path to the local .m2 Maven repository. This attribute is None if there is no available .m2 directory. local_maven_repo: str | None = None + #: The path to a local artifact file that can be used for analysis. + local_artifact_path: str | None = None + def load( self, macaron_path: str, diff --git a/src/macaron/slsa_analyzer/analyzer.py b/src/macaron/slsa_analyzer/analyzer.py index f10fb27a9..91044c33d 100644 --- a/src/macaron/slsa_analyzer/analyzer.py +++ b/src/macaron/slsa_analyzer/analyzer.py @@ -21,8 +21,9 @@ from macaron import __version__ from macaron.artifact.local_artifact import ( + get_artifact_hash_from_directory, + get_artifact_hash_from_file, get_local_artifact_dirs, - get_local_artifact_hash, ) from macaron.config.global_config import global_config from macaron.config.target_config import Configuration @@ -30,8 +31,10 @@ from macaron.database.table_definitions import ( Analysis, Component, + HashDigest, Provenance, ProvenanceSubject, + ReleaseArtifact, RepoFinderMetadata, Repository, ) @@ -508,11 +511,32 @@ def run_single( self._determine_build_tools(analyze_ctx, git_service) # Try to find an attestation from GitHub, if applicable. + release_artifact = None + release_digest = None if parsed_purl and not provenance_payload and analysis_target.repo_path and isinstance(git_service, GitHub): # Try to discover GitHub attestation for the target software component. 
- artifact_hash = self.get_artifact_hash(parsed_purl, local_artifact_dirs, package_registries_info) + local_artifact = global_config.local_artifact_path + artifact_hash, artifact_path = self.get_artifact_hash( + parsed_purl, local_artifact, local_artifact_dirs, package_registries_info + ) if artifact_hash: + digest = HashDigest( + digest=artifact_hash, + digest_algorithm="sha256", + ) + release_artifact = ReleaseArtifact( + name=artifact_path, + digests=[digest], + ) + release_digest = artifact_hash provenance_payload = self.get_github_attestation_payload(analyze_ctx, git_service, artifact_hash) + if provenance_payload: + try: + provenance_repo_url, provenance_commit_digest = extract_repo_and_commit_from_provenance( + provenance_payload + ) + except ProvenanceError as error: + logger.debug("Failed to extract from provenance: %s", error) self._determine_package_registries(analyze_ctx, package_registries_info) @@ -561,7 +585,7 @@ def run_single( analyze_ctx, provenance_payload, provenance_is_verified, provenance_l3_verified ) - analyze_ctx.dynamic_data["provenance_info"] = Provenance( + provenance = Provenance( component=component, repository_url=provenance_repo_url, commit_sha=provenance_commit_digest, @@ -571,8 +595,11 @@ def run_single( slsa_version=slsa_version, provenance_asset_name=provenance_asset.name if provenance_asset else None, provenance_asset_url=provenance_asset.url if provenance_asset else None, - # TODO Add release digest. + release_commit_sha=release_digest if release_digest else None, ) + analyze_ctx.dynamic_data["provenance_info"] = provenance + if release_artifact: + release_artifact.provenance = provenance if parsed_purl is not None: self._verify_repository_link(parsed_purl, analyze_ctx) @@ -973,35 +1000,46 @@ def create_analyze_ctx(self, component: Component) -> AnalyzeContext: def get_artifact_hash( self, purl: PackageURL, + local_artifact_file: str | None, local_artifact_dirs: list[str] | None, package_registries_info: list[PackageRegistryInfo], - ) -> str | None: + ) -> tuple[str | None, str | None]: """Get the hash of the artifact found from the passed PURL using local or remote files. - Provided local caches will be searched first. Artifacts will be downloaded if nothing is found within local - caches, or if no appropriate cache is provided for the target language. - Downloaded artifacts will be added to the passed package registry to prevent downloading them again. + Provided local artifacts will be searched first; cached local artifacts second. + Artifacts will be downloaded if nothing is found locally. Downloaded artifacts will be added to the passed + package registry to prevent downloading them again. Parameters ---------- purl: PackageURL The PURL of the artifact. + local_artifact_file: str + The path to a local artifact file provided by the user. local_artifact_dirs: list[str] | None - The list of directories that may contain the artifact file. + The list of cache directories that may contain the artifact file. package_registries_info: list[PackageRegistryInfo] The list of package registry information. Returns ------- - str | None - The hash of the artifact, or None if no artifact can be found locally or remotely. + tuple[str | None, str | None] + The hash of, and path to, the artifact; or None if no artifact can be found locally or remotely. """ + if local_artifact_file and os.path.isfile(local_artifact_file): + # Try to use the local artifact file. + # TODO add file checks based on the PURL type, if desired. E.g. .jar for Maven, etc. 
+ artifact_hash = get_artifact_hash_from_file(local_artifact_file) + + if artifact_hash: + return artifact_hash, local_artifact_file + if local_artifact_dirs: - # Try to get the hash from a local file. - artifact_hash = get_local_artifact_hash(purl, local_artifact_dirs) + # Try to get the hash from a local cache. + artifact_hash, artifact_path = get_artifact_hash_from_directory(purl, local_artifact_dirs) if artifact_hash: - return artifact_hash + return artifact_hash, artifact_path # Download the artifact. if purl.type == "maven": @@ -1014,7 +1052,7 @@ def get_artifact_hash( None, ) if not maven_registry: - return None + return None, None return maven_registry.get_artifact_hash(purl) @@ -1029,7 +1067,7 @@ def get_artifact_hash( ) if not pypi_registry: logger.debug("Missing registry for PyPI") - return None + return None, None registry_info = next( ( @@ -1041,31 +1079,35 @@ def get_artifact_hash( ) if not registry_info: logger.debug("Missing registry information for PyPI") - return None + return None, None if not purl.version: - return None + return None, None pypi_asset = find_or_create_pypi_asset(purl.name, purl.version, registry_info) if not pypi_asset: - return None + return None, None pypi_asset.has_repository = True if not pypi_asset.download(""): - return None + return None, None artifact_hash = pypi_asset.get_sha256() if artifact_hash: - return artifact_hash + return artifact_hash, pypi_asset.url source_url = pypi_asset.get_sourcecode_url("bdist_wheel") if not source_url: - return None + return None, None + + artifact_hash = pypi_registry.get_artifact_hash(source_url) + if artifact_hash: + return artifact_hash, source_url - return pypi_registry.get_artifact_hash(source_url) + return None, None logger.debug("Purl type '%s' not yet supported for GitHub attestation discovery.", purl.type) - return None + return None, None def get_github_attestation_payload( self, analyze_ctx: AnalyzeContext, git_service: GitHub, artifact_hash: str diff --git a/src/macaron/slsa_analyzer/package_registry/maven_central_registry.py b/src/macaron/slsa_analyzer/package_registry/maven_central_registry.py index 2fe3c5cea..6ffa746c9 100644 --- a/src/macaron/slsa_analyzer/package_registry/maven_central_registry.py +++ b/src/macaron/slsa_analyzer/package_registry/maven_central_registry.py @@ -239,7 +239,7 @@ def find_publish_timestamp(self, purl: str) -> datetime: raise InvalidHTTPResponseError(f"Invalid response from Maven central for {url}.") - def get_artifact_hash(self, purl: PackageURL) -> str | None: + def get_artifact_hash(self, purl: PackageURL) -> tuple[str | None, str | None]: """Return the hash of the artifact found by the passed purl relevant to the registry's URL. An artifact's URL will be as follows: @@ -263,15 +263,17 @@ def get_artifact_hash(self, purl: PackageURL) -> str | None: Returns ------- - str | None - The hash of the artifact, or None if not found. + tuple[str | None, str | None] + The hash and URL of the artifact; or None if no artifact can be found. """ if not purl.namespace: - return None + logger.debug("Cannot search for Maven artifact without no group ID.") + return None, None file_name = construct_primary_jar_file_name(purl) if not (purl.version and file_name): - return None + logger.debug("Cannot construct remote file name of artifact with no PURL version.") + return None, None # Maven supports but does not require a sha256 hash of uploaded artifacts. 
artifact_path = construct_maven_repository_path(purl.namespace, purl.name, purl.version) @@ -290,10 +292,11 @@ def get_artifact_hash(self, purl: PackageURL) -> str | None: response.raise_for_status() except requests.exceptions.HTTPError as http_err: logger.debug("HTTP error occurred when trying to download artifact: %s", http_err) - return None + return None, None if response.status_code != 200: - return None + logger.debug("Request for artifact failed: %s", response.status_code) + return None, None # Download file and compute hash as chunks are received. hash_algorithm = hashlib.sha256() @@ -304,7 +307,7 @@ def get_artifact_hash(self, purl: PackageURL) -> str | None: # Something went wrong with the request, abort. logger.debug("Error while streaming target file: %s", error) response.close() - return None + return None, None computed_artifact_hash: str = hash_algorithm.hexdigest() if retrieved_artifact_hash and computed_artifact_hash != retrieved_artifact_hash: @@ -313,7 +316,7 @@ def get_artifact_hash(self, purl: PackageURL) -> str | None: computed_artifact_hash, retrieved_artifact_hash, ) - return None + return None, None logger.debug("Computed hash of artifact: %s", computed_artifact_hash) - return computed_artifact_hash + return computed_artifact_hash, artifact_url diff --git a/tests/artifact/test_local_artifact.py b/tests/artifact/test_local_artifact.py index 5ac5cf651..509bfd3ab 100644 --- a/tests/artifact/test_local_artifact.py +++ b/tests/artifact/test_local_artifact.py @@ -14,8 +14,9 @@ construct_local_artifact_dirs_glob_pattern_maven_purl, construct_local_artifact_dirs_glob_pattern_pypi_purl, find_artifact_dirs_from_python_venv, + get_artifact_hash_from_directory, + get_artifact_hash_from_file, get_local_artifact_dirs, - get_local_artifact_hash, ) from macaron.artifact.maven import construct_primary_jar_file_name from macaron.errors import LocalArtifactFinderError @@ -253,8 +254,8 @@ def test_get_local_artifact_paths_succeeded_pypi(tmp_path: Path) -> None: assert sorted(result) == sorted(pypi_artifact_paths) -def test_get_local_artifact_hash() -> None: - """Test the get local artifact hash function.""" +def test_get_artifact_hash_from_directory() -> None: + """Test the get artifact hash from directory.""" artifact_purl = PackageURL.from_string("pkg:maven/test/test@1") artifact_jar_name = construct_primary_jar_file_name(artifact_purl) @@ -268,4 +269,18 @@ def test_get_local_artifact_hash() -> None: # A file containing: "1". target_hash = "6b86b273ff34fce19d6b804eff5a3f5747ada4eaa22f1d49c01e52ddb7875b4b" - assert target_hash == get_local_artifact_hash(artifact_purl, [temp_dir]) + computed_hash, _ = get_artifact_hash_from_directory(artifact_purl, [temp_dir]) + assert target_hash == computed_hash + + +def test_get_artifact_hash_from_file() -> None: + """Test the get artifact hash from file.""" + with tempfile.TemporaryDirectory() as temp_dir: + artifact_path = os.path.join(temp_dir, "1") + with open(artifact_path, "w", encoding="utf8") as file: + file.write("1") + + # A file containing: "1". 
+ target_hash = "6b86b273ff34fce19d6b804eff5a3f5747ada4eaa22f1d49c01e52ddb7875b4b" + + assert target_hash == get_artifact_hash_from_file(artifact_path) diff --git a/tests/integration/cases/github_maven_artifact_local/failure_policy.dl b/tests/integration/cases/github_maven_artifact_local/failure_policy.dl new file mode 100644 index 000000000..ff31abf90 --- /dev/null +++ b/tests/integration/cases/github_maven_artifact_local/failure_policy.dl @@ -0,0 +1,10 @@ +/* Copyright (c) 2024 - 2025, Oracle and/or its affiliates. All rights reserved. */ +/* Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl/. */ + +#include "prelude.dl" + +Policy("test_policy", component_id, "") :- + check_failed(component_id, "mcn_provenance_available_1"). + +apply_policy_to("test_policy", component_id) :- + is_component(component_id, "pkg:maven/io.liftwizard/liftwizard-checkstyle@2.1.22"). diff --git a/tests/integration/cases/github_maven_artifact_local/success_policy.dl b/tests/integration/cases/github_maven_artifact_local/success_policy.dl new file mode 100644 index 000000000..9df46219b --- /dev/null +++ b/tests/integration/cases/github_maven_artifact_local/success_policy.dl @@ -0,0 +1,10 @@ +/* Copyright (c) 2024 - 2025, Oracle and/or its affiliates. All rights reserved. */ +/* Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl/. */ + +#include "prelude.dl" + +Policy("test_policy", component_id, "") :- + check_passed(component_id, "mcn_provenance_available_1"). + +apply_policy_to("test_policy", component_id) :- + is_component(component_id, "pkg:maven/io.liftwizard/liftwizard-checkstyle@2.1.22"). diff --git a/tests/integration/cases/github_maven_artifact_local/test.yaml b/tests/integration/cases/github_maven_artifact_local/test.yaml new file mode 100644 index 000000000..e440a5add --- /dev/null +++ b/tests/integration/cases/github_maven_artifact_local/test.yaml @@ -0,0 +1,53 @@ +# Copyright (c) 2024 - 2025, Oracle and/or its affiliates. All rights reserved. +# Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl/. + +description: | + Discovering GitHub attestation of a command line provided Maven artifact. + An incorrect file is downloaded first, to demonstrate that the feature is taking precedence over other options. + The correct file is then used to ensure the feature works. 
+ +tags: +- macaron-python-package +- macaron-docker-image + +steps: +- name: Download artifact POM instead of the JAR + kind: shell + options: + cmd: curl --create-dirs -o ./output/liftwizard-checkstyle-2.1.22.jar https://repo1.maven.org/maven2/io/liftwizard/liftwizard-checkstyle/2.1.22/liftwizard-checkstyle-2.1.22.pom +- name: Run macaron analyze + kind: analyze + options: + command_args: + - -purl + - pkg:maven/io.liftwizard/liftwizard-checkstyle@2.1.22 + - -rp + - https://github.com/liftwizard/liftwizard + - --local-artifact-path + - ./output/liftwizard-checkstyle-2.1.22.jar +- name: Run macaron verify-policy to verify no provenance was found + kind: verify + options: + policy: failure_policy.dl +- name: cleanup + kind: shell + options: + cmd: rm -rf ./output +- name: Download artifact JAR + kind: shell + options: + cmd: curl --create-dirs -o ./output/liftwizard-checkstyle-2.1.22.jar https://repo1.maven.org/maven2/io/liftwizard/liftwizard-checkstyle/2.1.22/liftwizard-checkstyle-2.1.22.jar +- name: Run macaron analyze + kind: analyze + options: + command_args: + - -purl + - pkg:maven/io.liftwizard/liftwizard-checkstyle@2.1.22 + - -rp + - https://github.com/liftwizard/liftwizard + - --local-artifact-path + - ./output/liftwizard-checkstyle-2.1.22.jar +- name: Run macaron verify-policy to verify no provenance was found + kind: verify + options: + policy: success_policy.dl From 9ef8b3ea2b748d2ca072febf9ff7b8e7f5457135 Mon Sep 17 00:00:00 2001 From: Ben Selwyn-Smith Date: Tue, 19 Aug 2025 13:29:41 +1000 Subject: [PATCH 2/3] chore: simplify integration test Signed-off-by: Ben Selwyn-Smith --- .../{failure_policy.dl => policy.dl} | 3 +- .../success_policy.dl | 10 ------- .../github_maven_artifact_local/test.yaml | 28 ++----------------- 3 files changed, 4 insertions(+), 37 deletions(-) rename tests/integration/cases/github_maven_artifact_local/{failure_policy.dl => policy.dl} (74%) delete mode 100644 tests/integration/cases/github_maven_artifact_local/success_policy.dl diff --git a/tests/integration/cases/github_maven_artifact_local/failure_policy.dl b/tests/integration/cases/github_maven_artifact_local/policy.dl similarity index 74% rename from tests/integration/cases/github_maven_artifact_local/failure_policy.dl rename to tests/integration/cases/github_maven_artifact_local/policy.dl index ff31abf90..70ce0dd33 100644 --- a/tests/integration/cases/github_maven_artifact_local/failure_policy.dl +++ b/tests/integration/cases/github_maven_artifact_local/policy.dl @@ -4,7 +4,8 @@ #include "prelude.dl" Policy("test_policy", component_id, "") :- - check_failed(component_id, "mcn_provenance_available_1"). + check_passed(component_id, "mcn_provenance_available_1"), + release_artifact(_, "./output/liftwizard-checkstyle-2.1.22.jar", _, _). apply_policy_to("test_policy", component_id) :- is_component(component_id, "pkg:maven/io.liftwizard/liftwizard-checkstyle@2.1.22"). diff --git a/tests/integration/cases/github_maven_artifact_local/success_policy.dl b/tests/integration/cases/github_maven_artifact_local/success_policy.dl deleted file mode 100644 index 9df46219b..000000000 --- a/tests/integration/cases/github_maven_artifact_local/success_policy.dl +++ /dev/null @@ -1,10 +0,0 @@ -/* Copyright (c) 2024 - 2025, Oracle and/or its affiliates. All rights reserved. */ -/* Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl/. */ - -#include "prelude.dl" - -Policy("test_policy", component_id, "") :- - check_passed(component_id, "mcn_provenance_available_1"). 
- -apply_policy_to("test_policy", component_id) :- - is_component(component_id, "pkg:maven/io.liftwizard/liftwizard-checkstyle@2.1.22"). diff --git a/tests/integration/cases/github_maven_artifact_local/test.yaml b/tests/integration/cases/github_maven_artifact_local/test.yaml index e440a5add..83839863e 100644 --- a/tests/integration/cases/github_maven_artifact_local/test.yaml +++ b/tests/integration/cases/github_maven_artifact_local/test.yaml @@ -3,36 +3,12 @@ description: | Discovering GitHub attestation of a command line provided Maven artifact. - An incorrect file is downloaded first, to demonstrate that the feature is taking precedence over other options. - The correct file is then used to ensure the feature works. tags: - macaron-python-package - macaron-docker-image steps: -- name: Download artifact POM instead of the JAR - kind: shell - options: - cmd: curl --create-dirs -o ./output/liftwizard-checkstyle-2.1.22.jar https://repo1.maven.org/maven2/io/liftwizard/liftwizard-checkstyle/2.1.22/liftwizard-checkstyle-2.1.22.pom -- name: Run macaron analyze - kind: analyze - options: - command_args: - - -purl - - pkg:maven/io.liftwizard/liftwizard-checkstyle@2.1.22 - - -rp - - https://github.com/liftwizard/liftwizard - - --local-artifact-path - - ./output/liftwizard-checkstyle-2.1.22.jar -- name: Run macaron verify-policy to verify no provenance was found - kind: verify - options: - policy: failure_policy.dl -- name: cleanup - kind: shell - options: - cmd: rm -rf ./output - name: Download artifact JAR kind: shell options: @@ -47,7 +23,7 @@ steps: - https://github.com/liftwizard/liftwizard - --local-artifact-path - ./output/liftwizard-checkstyle-2.1.22.jar -- name: Run macaron verify-policy to verify no provenance was found +- name: Run macaron verify-policy to verify provenance was found kind: verify options: - policy: success_policy.dl + policy: policy.dl From 3de80b078530ea0ce0959b00858f6ece399fe2b4 Mon Sep 17 00:00:00 2001 From: Ben Selwyn-Smith Date: Tue, 19 Aug 2025 13:44:03 +1000 Subject: [PATCH 3/3] chore: update tests Signed-off-by: Ben Selwyn-Smith --- .../package_registry/test_maven_central_registry.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/tests/slsa_analyzer/package_registry/test_maven_central_registry.py b/tests/slsa_analyzer/package_registry/test_maven_central_registry.py index 40b51c9ae..1da8e9088 100644 --- a/tests/slsa_analyzer/package_registry/test_maven_central_registry.py +++ b/tests/slsa_analyzer/package_registry/test_maven_central_registry.py @@ -257,7 +257,7 @@ def test_get_artifact_hash_failures( httpserver.expect_request(artifact_path + ".sha256").respond_with_data(expected_hash) httpserver.expect_request(artifact_path).respond_with_data(b"example_data_2") - result = maven_registry.get_artifact_hash(purl) + result, _ = maven_registry.get_artifact_hash(purl) assert not result @@ -282,7 +282,9 @@ def test_get_artifact_hash_success( expected_hash = hash_algorithm.hexdigest() httpserver.expect_request(artifact_path + ".sha256").respond_with_data(expected_hash) httpserver.expect_request(artifact_path).respond_with_data(b"example_data") + expected_artifact_path = f"http://{httpserver.host}:{httpserver.port}{artifact_path}" - result = maven_registry.get_artifact_hash(purl) + result, result_path = maven_registry.get_artifact_hash(purl) assert result + assert expected_artifact_path == result_path
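
Not part of the patch itself: a minimal sketch of the lookup order the new helpers in src/macaron/artifact/local_artifact.py follow (a user-supplied file from --local-artifact-path first, then cached artifact directories, before any remote download). The PURL, JAR path, and .m2 directory below are illustrative assumptions taken from the integration test, and running the sketch assumes Macaron is installed.

    # Sketch of the hash-resolution order added in this series; all paths are illustrative.
    from packageurl import PackageURL

    from macaron.artifact.local_artifact import (
        get_artifact_hash_from_directory,
        get_artifact_hash_from_file,
    )

    purl = PackageURL.from_string("pkg:maven/io.liftwizard/liftwizard-checkstyle@2.1.22")

    # Step 1: hash the explicit local file, if one was passed on the command line.
    local_file = "./output/liftwizard-checkstyle-2.1.22.jar"  # assumed path
    artifact_hash = get_artifact_hash_from_file(local_file)
    artifact_path = local_file if artifact_hash else None

    # Step 2: otherwise search local cache directories that may contain the primary JAR.
    if not artifact_hash:
        cache_dir = "/home/user/.m2/repository/io/liftwizard/liftwizard-checkstyle/2.1.22"  # assumed
        artifact_hash, artifact_path = get_artifact_hash_from_directory(purl, [cache_dir])

    # Both helpers return None values when nothing is found, so the analyzer can
    # fall through to downloading the artifact from the package registry.
    print(artifact_hash, artifact_path)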