35 changes: 20 additions & 15 deletions src/macaron/config/defaults.py
@@ -18,13 +18,13 @@ class ConfigParser(configparser.ConfigParser):
def get_list(
self,
section: str,
item: str,
option: str,
delimiter: str | None = "\n",
fallback: list[str] | None = None,
duplicated_ok: bool = False,
strip: bool = True,
remove_duplicates: bool = True,
) -> list[str]:
r"""Parse and return a list of strings from an item in ``defaults.ini``.
r"""Parse and return a list of strings from an ``option`` for ``section`` in ``defaults.ini``.

This method uses str.split() to split the value into a list of strings.
References: https://docs.python.org/3/library/stdtypes.html#str.split.
@@ -37,24 +37,26 @@ def get_list(
If ``strip`` is True (default: True), strings are whitespace-stripped and empty strings
are removed from the final result.

If ``duplicated_ok`` is True (default: False), duplicated values are not removed from the final list.
If ``remove_duplicates`` is True, duplicated elements which come after their first instances will
be removed from the list. This operation happens after ``strip`` is handled.

The order of non-empty elements in the list is preserved.
The content of each string in the list is not validated and should be handled separately.

Parameters
----------
section : str
The section in ``defaults.ini``.
item : str
The item to parse the list.
option : str
The option whose values will be split into a list of strings.
delimiter : str | None
The delimiter used to split the strings.
fallback : list | None
The fallback value in case of errors.
duplicated_ok : bool
If True allow duplicate values.
strip: bool
strip : bool
If True, strings are whitespace-stripped and any empty strings are removed.
remove_duplicates : bool
If True, duplicated elements will be removed from the list.

Returns
-------
@@ -79,20 +81,23 @@ def get_list(
allowed_hosts == ["github.com", "boo.com gitlab.com", "host com"]
"""
try:
value = self.get(section, item)
value = self.get(section, option)
if isinstance(value, str):
content = value.split(sep=delimiter)

if strip:
content = [x.strip() for x in content if x.strip()]

if duplicated_ok:
if not remove_duplicates:
return content

distinct_values = set()
distinct_values.update(content)
return list(distinct_values)
except configparser.NoOptionError as error:
values = []
for ele in content:
if ele in values:
continue
values.append(ele)
return values
except (configparser.NoOptionError, configparser.NoSectionError) as error:
logger.error(error)

return fallback or []
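To make the behavioural change above concrete, here is a minimal standalone sketch (not Macaron's `ConfigParser` subclass itself) of the new splitting, stripping, and order-preserving de-duplication. The section and option names below are hypothetical and only serve the example.

```python
# Minimal standalone sketch of the new get_list() behaviour, using only the
# standard library. Section/option names are hypothetical examples.
import configparser


def get_list_sketch(
    parser: configparser.ConfigParser,
    section: str,
    option: str,
    delimiter: str | None = "\n",
    fallback: list[str] | None = None,
    strip: bool = True,
    remove_duplicates: bool = True,
) -> list[str]:
    try:
        content = parser.get(section, option).split(sep=delimiter)
        if strip:
            # Whitespace-strip entries and drop empty strings.
            content = [x.strip() for x in content if x.strip()]
        if not remove_duplicates:
            return content
        # Keep only the first occurrence of each element, preserving order
        # (the previous implementation used a set, which discarded ordering).
        values: list[str] = []
        for ele in content:
            if ele not in values:
                values.append(ele)
        return values
    except (configparser.NoOptionError, configparser.NoSectionError):
        return fallback or []


parser = configparser.ConfigParser()
parser.read_string(
    "[hypothetical.section]\nallowed_hosts =\n    github.com\n    gitlab.com\n    github.com\n"
)
assert get_list_sketch(parser, "hypothetical.section", "allowed_hosts") == ["github.com", "gitlab.com"]
```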
3 changes: 1 addition & 2 deletions src/macaron/repo_finder/repo_finder_java.py
@@ -119,7 +119,6 @@ def _create_urls(self, group: str, artifact: str, version: str) -> list[str]:
"repofinder.java",
"artifact_repositories",
fallback=["https://repo.maven.apache.org/maven2"],
duplicated_ok=True,
)
urls = []
for repo in repositories:
@@ -163,7 +162,7 @@ def _read_pom(self, pom: str) -> list[str]:
The extracted contents as a list of strings.
"""
# Retrieve tags
tags = defaults.get_list("repofinder.java", "repo_pom_paths", duplicated_ok=True)
tags = defaults.get_list("repofinder.java", "repo_pom_paths")
if not any(tags):
logger.debug("No POM tags found for URL discovery.")
return []
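Since `duplicated_ok=True` is gone, the repository list above now arrives de-duplicated while keeping the configured order. As a rough, hypothetical illustration (the real `_create_urls` body is not shown in this diff), URL construction over that list might look like the following, using the standard Maven repository layout:

```python
# Hypothetical sketch only: building Maven-style POM URLs from the
# de-duplicated, order-preserving repository list. The actual _create_urls
# implementation in repo_finder_java.py is not part of this diff.
def create_pom_urls(repositories: list[str], group: str, artifact: str, version: str) -> list[str]:
    urls = []
    for repo in repositories:
        # Maven layout: dots in the group ID become path separators.
        group_path = group.replace(".", "/")
        urls.append(f"{repo.rstrip('/')}/{group_path}/{artifact}/{version}/{artifact}-{version}.pom")
    return urls


print(create_pom_urls(["https://repo.maven.apache.org/maven2"], "org.apache.maven", "maven-core", "3.9.6"))
# ['https://repo.maven.apache.org/maven2/org/apache/maven/maven-core/3.9.6/maven-core-3.9.6.pom']
```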
4 changes: 2 additions & 2 deletions src/macaron/slsa_analyzer/registry.py
@@ -680,8 +680,8 @@ def prepare(self) -> bool:
logger.error("Found circular dependencies in registered checks: %s", str(error))
return False

ex_pats = defaults.get_list(section="analysis.checks", item="exclude", fallback=[])
in_pats = defaults.get_list(section="analysis.checks", item="include", fallback=["*"])
ex_pats = defaults.get_list(section="analysis.checks", option="exclude", fallback=[])
in_pats = defaults.get_list(section="analysis.checks", option="include", fallback=["*"])
try:
checks_to_run = self.get_final_checks(ex_pats, in_pats)
except CheckRegistryError as error:
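For context on how the renamed keyword is consumed: `get_final_checks` is not part of this diff, but a rough sketch of glob-based include/exclude filtering over check IDs (the check names below are hypothetical examples) could look like this:

```python
# Hypothetical sketch of include/exclude glob filtering over check IDs; this is
# not Macaron's actual get_final_checks implementation.
import fnmatch


def final_checks(all_checks: list[str], ex_pats: list[str], in_pats: list[str]) -> list[str]:
    # A check runs if it matches at least one include pattern and no exclude pattern.
    included = [c for c in all_checks if any(fnmatch.fnmatch(c, pat) for pat in in_pats)]
    return [c for c in included if not any(fnmatch.fnmatch(c, pat) for pat in ex_pats)]


checks = ["mcn_build_service_1", "mcn_provenance_available_1"]
print(final_checks(checks, ex_pats=["*provenance*"], in_pats=["*"]))
# ['mcn_build_service_1']
```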