diff --git a/bci_tester/data.py b/bci_tester/data.py
index 5ae88620..c8fea65d 100755
--- a/bci_tester/data.py
+++ b/bci_tester/data.py
@@ -70,13 +70,15 @@
 # List the released versions of SLE, used for supportabilty and EULA tests
 RELEASED_SLE_VERSIONS = ("15.3", "15.4", "15.5", "15.6", "15.6-ai")
 
-assert (
-    sorted(ALLOWED_BASE_OS_VERSIONS) == list(ALLOWED_BASE_OS_VERSIONS)
-), f"list ALLOWED_BASE_OS_VERSIONS must be sorted, but got {ALLOWED_BASE_OS_VERSIONS}"
+assert sorted(ALLOWED_BASE_OS_VERSIONS) == list(ALLOWED_BASE_OS_VERSIONS), (
+    f"list ALLOWED_BASE_OS_VERSIONS must be sorted, but got {ALLOWED_BASE_OS_VERSIONS}"
+)
 
-assert (
-    sorted(ALLOWED_NONBASE_OS_VERSIONS) == list(ALLOWED_NONBASE_OS_VERSIONS)
-), f"list ALLOWED_NONOS_VERSIONS must be sorted, but got {ALLOWED_NONBASE_OS_VERSIONS}"
+assert sorted(ALLOWED_NONBASE_OS_VERSIONS) == list(
+    ALLOWED_NONBASE_OS_VERSIONS
+), (
+    f"list ALLOWED_NONOS_VERSIONS must be sorted, but got {ALLOWED_NONBASE_OS_VERSIONS}"
+)
 
 if not (
     OS_VERSION in ALLOWED_BASE_OS_VERSIONS
diff --git a/conftest.py b/conftest.py
index fbbb7936..ce29d117 100644
--- a/conftest.py
+++ b/conftest.py
@@ -53,9 +53,9 @@ def container_git_clone(
 
         assert isinstance(container_fixture, ContainerData)
 
-        assert (
-            container_fixture is not None
-        ), "No container fixture was passed to the test function, cannot execute `container_git_clone`"
+        assert container_fixture is not None, (
+            "No container fixture was passed to the test function, cannot execute `container_git_clone`"
+        )
 
         check_output(shlex.split(git_repo_build.clone_command), cwd=tmp_path)
 
diff --git a/tests/test_all.py b/tests/test_all.py
index f1ed020c..0578bdde 100644
--- a/tests/test_all.py
+++ b/tests/test_all.py
@@ -194,9 +194,9 @@ def test_lifecycle(auto_container):
                 support_end = datetime.datetime.strptime(
                     entry_date, "%Y-%m-%d"
                 )
-                assert (
-                    datetime.datetime.now() < support_end
-                ), f"{entry_name} = {installed_binaries[entry_name]} installed but out of support since {entry_date}"
+                assert datetime.datetime.now() < support_end, (
+                    f"{entry_name} = {installed_binaries[entry_name]} installed but out of support since {entry_date}"
+                )
 
 
 @pytest.mark.skipif(
@@ -446,9 +446,9 @@ def test_systemd_not_installed_in_all_containers_except_init(container):
 
     # we cannot check for an existing package if rpm is not installed
    if container.connection.exists("rpm"):
-        assert not container.connection.package(
-            "systemd"
-        ).is_installed, "systemd is installed in this container!"
+        assert not container.connection.package("systemd").is_installed, (
+            "systemd is installed in this container!"
+        )
 
 
 @pytest.mark.parametrize(
@@ -478,9 +478,9 @@ def test_bci_eula_is_correctly_available(container: ContainerData) -> None:
         OS_VERSION in ALLOWED_BCI_REPO_OS_VERSIONS
         and OS_VERSION in RELEASED_SLE_VERSIONS
     ):
-        assert container.connection.file(
-            bci_license
-        ).exists, "BCI EULA is missing"
+        assert container.connection.file(bci_license).exists, (
+            "BCI EULA is missing"
+        )
         assert (
             "SUSE Linux Enterprise Base Container Image License"
             in container.connection.check_output(f"head -n 1 {bci_license}")
         )
@@ -500,9 +500,9 @@ def test_bci_eula_is_correctly_available(container: ContainerData) -> None:
     ):
         pytest.skip("Unmaintained bci-* base os containers are not tested")
 
-    assert not container.connection.file(
-        bci_license
-    ).exists, "BCI EULA shall not be in LTSS container"
+    assert not container.connection.file(bci_license).exists, (
+        "BCI EULA shall not be in LTSS container"
+    )
 
 
 @pytest.mark.skipif(
@@ -633,9 +633,9 @@ def test_container_build_and_repo(container_per_test, host):
 
         repos = get_repos_from_connection(container_per_test.connection)
 
-        assert (
-            len(repos) > 1
-        ), "On a registered host, we must have more than one repository on the host"
+        assert len(repos) > 1, (
+            "On a registered host, we must have more than one repository on the host"
+        )
     else:
         assert len(repos) <= len(expected_repos)
         assert not repo_names - expected_repos
@@ -670,12 +670,13 @@ def test_container_build_and_repo(container_per_test, host):
     )
 
     assert (
-        ("SLE_BCI_debug" in repo_names and "SLE_BCI_source" in repo_names)
-        or (
-            "SLE_BCI_debug" not in repo_names
-            and "SLE_BCI_source" not in repo_names
-        )
-    ), "repos SLE_BCI_source and SLE_BCI_debug must either both be present or both missing"
+        "SLE_BCI_debug" in repo_names and "SLE_BCI_source" in repo_names
+    ) or (
+        "SLE_BCI_debug" not in repo_names
+        and "SLE_BCI_source" not in repo_names
+    ), (
+        "repos SLE_BCI_source and SLE_BCI_debug must either both be present or both missing"
+    )
 
     # check that all enabled repos are valid and can be refreshed
     container_per_test.connection.run_expect([0], "zypper -n ref")
diff --git a/tests/test_distribution.py b/tests/test_distribution.py
index fe01f2e3..e36555b7 100644
--- a/tests/test_distribution.py
+++ b/tests/test_distribution.py
@@ -41,7 +41,7 @@ def _fetch_catalog():
     host.run_expect(
         [0],
         textwrap.dedent(
-            f"""cd {tmp_path} && {' '.join(container_runtime.build_command)} \
+            f"""cd {tmp_path} && {" ".join(container_runtime.build_command)} \
 -t {container_path} -f Containerfile .""",
         ),
     )
diff --git a/tests/test_dotnet.py b/tests/test_dotnet.py
index f9060a9a..1c8f49b1 100644
--- a/tests/test_dotnet.py
+++ b/tests/test_dotnet.py
@@ -223,9 +223,9 @@ def get_pkg_list(extra_search_flags: str = "") -> List[str]:
         return valid_names
 
     repos = get_repos_from_connection(container_per_test.connection)
-    assert (
-        len(repos) >= 2
-    ), "The .Net containers must contain the SLE_BCI and MS .Net repository"
+    assert len(repos) >= 2, (
+        "The .Net containers must contain the SLE_BCI and MS .Net repository"
+    )
 
     ms_repos = [repo for repo in repos if repo.name == MS_REPO_NAME]
     assert len(ms_repos) == 1
diff --git a/tests/test_fips.py b/tests/test_fips.py
index abd00e2f..c8532ba7 100644
--- a/tests/test_fips.py
+++ b/tests/test_fips.py
@@ -151,9 +151,9 @@ def openssl_fips_hashes_test_fnct(container_per_test: ContainerData) -> None:
         dev_null_digest = container_per_test.connection.check_output(
             f"openssl {digest} /dev/null"
         )
-        assert (
-            f"= {NULL_DIGESTS[digest]}" in dev_null_digest
-        ), f"unexpected digest of hash {digest}: {dev_null_digest}"
+        assert f"= {NULL_DIGESTS[digest]}" in dev_null_digest, (
+            f"unexpected digest of hash {digest}: {dev_null_digest}"
+        )
 
 
 @pytest.mark.skipif(
@@ -218,9 +218,9 @@ def test_gnutls_binary(container_per_test: ContainerData) -> None:
 
     for digest in NONFIPS_GNUTLS_DIGESTS:
         err_msg = c.run_expect([1], f"/bin/fips-test-gnutls {digest}").stderr
-        assert (
-            "Hash calculation failed" in err_msg
-        ), f"Hash calculation unexpectedly succeeded for {digest}"
+        assert "Hash calculation failed" in err_msg, (
+            f"Hash calculation unexpectedly succeeded for {digest}"
+        )
 
 
 @pytest.mark.parametrize(
diff --git a/tests/test_go.py b/tests/test_go.py
index e7b4efb8..8eb2d3a1 100644
--- a/tests/test_go.py
+++ b/tests/test_go.py
@@ -180,9 +180,9 @@ def test_rancher_build(host, host_git_clone, dapper, container: ContainerData):
     )
     from_line = from_line_regex.match(contents)
 
-    assert from_line and from_line.group(
-        "go_ver"
-    ), f"No valid FROM line found in Dockerfile.dapper: {contents}"
+    assert from_line and from_line.group("go_ver"), (
+        f"No valid FROM line found in Dockerfile.dapper: {contents}"
+    )
     go_version = container.connection.check_output("echo $GOLANG_VERSION")
     if not go_version.startswith(from_line.group("go_ver")):
         pytest.skip(
diff --git a/tests/test_init.py b/tests/test_init.py
index ea6ffc00..4b5d6faf 100644
--- a/tests/test_init.py
+++ b/tests/test_init.py
@@ -88,9 +88,9 @@ def extract_time(stdout, prefix):
         startup = extract_time(time.stdout, "Startup finished in ")
         assert startup <= startup_limit, "Startup threshold exceeded"
         target = extract_time(time.stdout, ".target reached after ")
-        assert (
-            target <= target_limit
-        ), "Reaching systemd target threshold exceeded"
+        assert target <= target_limit, (
+            "Reaching systemd target threshold exceeded"
+        )
 
     def test_systemd_nofailed_units(self, auto_container):
         """
@@ -99,9 +99,9 @@ def test_systemd_nofailed_units(self, auto_container):
         output = auto_container.connection.run_expect(
             [0], "systemctl list-units --state=failed"
         )
-        assert (
-            "0 loaded units listed" in output.stdout
-        ), "failed systemd units detected"
+        assert "0 loaded units listed" in output.stdout, (
+            "failed systemd units detected"
+        )
 
     def test_systemd_detect_virt(self, auto_container, container_runtime):
         """
@@ -109,9 +109,9 @@ def test_systemd_detect_virt(self, auto_container, container_runtime):
         """
         output = auto_container.connection.check_output("systemd-detect-virt")
         runtime = container_runtime.runner_binary
-        assert (
-            runtime in output
-        ), f"systemd-detect-virt failed to detect {runtime}"
+        assert runtime in output, (
+            f"systemd-detect-virt failed to detect {runtime}"
+        )
 
     def test_journald(self, auto_container):
         """
@@ -122,9 +122,9 @@ def test_journald(self, auto_container):
         journal = auto_container.connection.run_expect(
             [0], "journalctl --boot"
         )
-        assert (
-            "Reached target Multi-User System" in journal.stdout
-        ), "Multi-User target was not reached"
+        assert "Reached target Multi-User System" in journal.stdout, (
+            "Multi-User target was not reached"
+        )
 
     def test_hostnamectl(self, auto_container, container_runtime):
         """
@@ -143,9 +143,9 @@ def test_hostnamectl(self, auto_container, container_runtime):
             if OS_VERSION == "tumbleweed"
             else "SUSE Linux Enterprise Server"
         )
-        assert (
-            expected_os in values["OperatingSystemPrettyName"]
-        ), "Missing SUSE tag in Operating system"
+        assert expected_os in values["OperatingSystemPrettyName"], (
+            "Missing SUSE tag in Operating system"
+        )
         virt_detected = auto_container.connection.run_expect(
             [0], "systemd-detect-virt -c"
         )
@@ -159,9 +159,9 @@ def test_timedatectl(self, auto_container):
         Ensure :command:`timedatectl` works as expected and the container timezone is UTC
         """
         output = auto_container.connection.check_output("timedatectl")
-        assert re.search(
-            r"Time zone:.*(Etc/UTC|UTC)", output
-        ), "Time zone not set to UTC"
+        assert re.search(r"Time zone:.*(Etc/UTC|UTC)", output), (
+            "Time zone not set to UTC"
+        )
 
         # Check that the reported timestamp for UTC and local time match the system time
         def check_timestamp(pattern, timestamp, delta):
@@ -169,16 +169,16 @@ def check_timestamp(pattern, timestamp, delta):
             e.g. use the "Universal time" as pattern and datetime.utcnow() to check for the UTC time
             """
             grep = [line for line in output.split("\n") if pattern in line]
-            assert (
-                len(grep) == 1
-            ), f"{pattern} not present in timedatectl output"
+            assert len(grep) == 1, (
+                f"{pattern} not present in timedatectl output"
+            )
             tsp = (
                 grep[0].strip()[len(pattern) + 2 :].strip()
             )  # Extract actual timestamp
             tsp = datetime.datetime.strptime(tsp, "%a %Y-%m-%d %H:%M:%S UTC")
-            assert (
-                abs(tsp - timestamp) < delta
-            ), f"timedatectl diff exceeded for {pattern}"
+            assert abs(tsp - timestamp) < delta, (
+                f"timedatectl diff exceeded for {pattern}"
+            )
 
         check_timestamp(
             "Universal time",
@@ -197,6 +197,6 @@ def test_no_loginctl_sessions(self, auto_container):
         Ensure :command:`loginctl` contains no logins
         """
         loginctl = auto_container.connection.run_expect([0], "loginctl")
-        assert (
-            "No sessions" in loginctl.stdout
-        ), "Assert no sessions are present failed"
+        assert "No sessions" in loginctl.stdout, (
+            "Assert no sessions are present failed"
+        )
diff --git a/tests/test_metadata.py b/tests/test_metadata.py
index 7630e510..5663365d 100644
--- a/tests/test_metadata.py
+++ b/tests/test_metadata.py
@@ -291,9 +291,9 @@ def _get_container_label_prefix(
 ]
 
 
-assert len(ALL_CONTAINERS) == len(
-    IMAGES_AND_NAMES
-), "IMAGES_AND_NAMES must have all containers from ALL_CONTAINERS"
+assert len(ALL_CONTAINERS) == len(IMAGES_AND_NAMES), (
+    "IMAGES_AND_NAMES must have all containers from ALL_CONTAINERS"
+)
 
 
 @pytest.mark.parametrize(
@@ -375,9 +375,9 @@ def test_general_labels(
         "https://www.suse.com/products/long-term-service-pack-support/",
     )
 
-    assert (
-        labels[f"{prefix}.url"] in expected_url
-    ), f"expected LABEL {prefix}.url = {expected_url} but is {labels[f'{prefix}.url']}"
+    assert labels[f"{prefix}.url"] in expected_url, (
+        f"expected LABEL {prefix}.url = {expected_url} but is {labels[f'{prefix}.url']}"
+    )
     assert labels[f"{prefix}.vendor"] == VENDOR
 
     if OS_VERSION == "tumbleweed":
@@ -507,9 +507,9 @@ def test_disturl_can_be_checked_out(
            pytest.skip(reason=f"Cannot connect to SUSE internal host: {e}")
         raise
     req.raise_for_status()
-    assert (
-        "kiwi" in req.text or "Dockerfile" in req.text
-    ), "Cannot find a valid build description"
+    assert "kiwi" in req.text or "Dockerfile" in req.text, (
+        "Cannot find a valid build description"
+    )
 
 
 @SKIP_IF_TW_MARK
@@ -559,9 +559,9 @@ def test_acc_label(container: ContainerData):
     ``com.suse.supportlevel`` set to ``acc``.
     Reference: https://confluence.suse.com/display/ENGCTNRSTORY/SLE+BCI+Image+Overview
     """
-    assert (
-        container.inspect.config.labels["com.suse.supportlevel"] == "acc"
-    ), "acc images must be marked as acc"
+    assert container.inspect.config.labels["com.suse.supportlevel"] == "acc", (
+        "acc images must be marked as acc"
+    )
 
 
 @SKIP_IF_TW_MARK
@@ -571,9 +571,9 @@ def test_l3_label(container: ContainerData):
     ``com.suse.supportlevel`` set to ``l3``.
     Reference: https://confluence.suse.com/display/ENGCTNRSTORY/SLE+BCI+Image+Overview
     """
-    assert (
-        container.inspect.config.labels["com.suse.supportlevel"] == "l3"
-    ), "image supportlevel must be marked as L3"
+    assert container.inspect.config.labels["com.suse.supportlevel"] == "l3", (
+        "image supportlevel must be marked as L3"
+    )
 
 
 @pytest.mark.parametrize(
@@ -680,15 +680,15 @@ def test_oci_base_refs(
     base_digest: str = labels["org.opencontainers.image.base.digest"]
     base_name: str = labels["org.opencontainers.image.base.name"]
 
-    assert (
-        ":" in base_name
-    ), f"`org.opencontainers.image.base.name` is not the expected format: {base_name}"
+    assert ":" in base_name, (
+        f"`org.opencontainers.image.base.name` is not the expected format: {base_name}"
+    )
     base_repository = base_name.partition(":")[0]
 
     assert base_name.startswith("registry.suse.com/")
-    assert (
-        f":{OS_VERSION_ID}" in base_name
-    ), "Base image reference is not the expected version"
+    assert f":{OS_VERSION_ID}" in base_name, (
+        "Base image reference is not the expected version"
+    )
     assert base_digest.startswith("sha256:")
 
     if PODMAN_SELECTED and container_runtime.version.major < 3:
diff --git a/tests/test_minimal.py b/tests/test_minimal.py
index e0ea234a..02a55ddf 100644
--- a/tests/test_minimal.py
+++ b/tests/test_minimal.py
@@ -105,9 +105,9 @@ def test_fat_packages_absent(container):
 )
 def test_rpm_absent_in_micro(container):
     """Ensure that rpm is not present in the micro container."""
-    assert not container.connection.exists(
-        "rpm"
-    ), "rpm must not be present in the micro container"
+    assert not container.connection.exists("rpm"), (
+        "rpm must not be present in the micro container"
+    )
 
 
 @pytest.mark.parametrize(
@@ -115,6 +115,6 @@ def test_rpm_absent_in_micro(container):
 )
 def test_rpm_present_in_minimal(container):
     """Ensure that rpm is present in the minimal container."""
-    assert container.connection.exists(
-        "rpm"
-    ), "rpm must be present in the minimal container"
+    assert container.connection.exists("rpm"), (
+        "rpm must be present in the minimal container"
+    )
diff --git a/tests/test_openjdk.py b/tests/test_openjdk.py
index ffae98d1..1fa52a67 100644
--- a/tests/test_openjdk.py
+++ b/tests/test_openjdk.py
@@ -154,9 +154,9 @@ def get_env_var(var: str) -> str:
         if line.strip().startswith("java.home"):
             assert line.strip().replace("java.home = ", "") == java_home_path
             java_home_setting_checked = True
-    assert (
-        java_home_setting_checked
-    ), f"java.home setting missing in the output of {java_props_cmd}"
+    assert java_home_setting_checked, (
+        f"java.home setting missing in the output of {java_props_cmd}"
+    )
 
 
 @dataclass(frozen=True)
diff --git a/tests/test_pcp.py b/tests/test_pcp.py
index c247a30d..382e3569 100644
--- a/tests/test_pcp.py
+++ b/tests/test_pcp.py
@@ -23,9 +23,9 @@
 def test_pcp_services_status(auto_container_per_test):
     """Check that the pcp services are healthy."""
     for service in ("pmcd", "pmlogger", "pmproxy", "pmie"):
-        assert wait_for_service(
-            auto_container_per_test, service
-        ), f"Timed out waiting for {service} to start"
+        assert wait_for_service(auto_container_per_test, service), (
+            f"Timed out waiting for {service} to start"
+        )
 
 
 def test_call_pmcd(auto_container_per_test):
@@ -33,9 +33,9 @@ def test_call_pmcd(auto_container_per_test):
     functions.
 
     """
-    assert wait_for_service(
-        auto_container_per_test, "pmcd"
-    ), "Timed out waiting for pmcd to start"
+    assert wait_for_service(auto_container_per_test, "pmcd"), (
+        "Timed out waiting for pmcd to start"
+    )
 
     auto_container_per_test.connection.run_expect(
         [0], "pmprobe -v mem.physmem"
@@ -49,9 +49,9 @@ def test_call_pmproxy(auto_container_per_test):
     """
     port = auto_container_per_test.forwarded_ports[0].host_port
     for service in ("pmcd", "pmproxy"):
-        assert wait_for_service(
-            auto_container_per_test, service
-        ), f"Timed out waiting for {service} to start"
+        assert wait_for_service(auto_container_per_test, service), (
+            f"Timed out waiting for {service} to start"
+        )
 
     resp = requests.get(
         f"http://localhost:{port}/metrics?names=mem.physmem", timeout=30
diff --git a/tests/test_python.py b/tests/test_python.py
index fcf64c2b..5586a0f4 100644
--- a/tests/test_python.py
+++ b/tests/test_python.py
@@ -103,12 +103,12 @@ def test_pipx(container_per_test):
     )
     run1 = container_per_test.connection.check_output("xkcdpass")
     run2 = container_per_test.connection.check_output("xkcdpass")
-    assert (
-        len(run1) > 20 and len(run2) > 20
-    ), "xkcdpass should output a passphrase with more than 20 characters"
-    assert (
-        run1 != run2
-    ), "xkcdpass should output a different passphrase each time"
+    assert len(run1) > 20 and len(run2) > 20, (
+        "xkcdpass should output a passphrase with more than 20 characters"
+    )
+    assert run1 != run2, (
+        "xkcdpass should output a different passphrase each time"
+    )
 
 
 def test_pip(auto_container):
diff --git a/tests/test_repository.py b/tests/test_repository.py
index 85c85b4c..538c4f86 100644
--- a/tests/test_repository.py
+++ b/tests/test_repository.py
@@ -146,7 +146,9 @@ def test_sle_bci_forbidden_packages(container_per_test):
         )
     )
 
-    assert not forbidden_packages, f"package_list must not contain any forbidden packages, but found {', '.join(forbidden_packages)}"
+    assert not forbidden_packages, (
+        f"package_list must not contain any forbidden packages, but found {', '.join(forbidden_packages)}"
+    )
 
 
 @pytest.mark.skipif(
@@ -208,12 +210,12 @@ def test_codestream_lifecycle(container_per_test):
     lifecycle = zypper_lifecycle_xml.find(
         ".//product[@name='SLES']/xmlfwd/codestream/endoflife"
     )
-    assert (
-        lifecycle is not None
-    ), "No endoflife information found in product description"
-    assert (
-        lifecycle.text == "2031-07-31"
-    ), f"Expected end of life 2031-07-31, but got {lifecycle.text}"
+    assert lifecycle is not None, (
+        "No endoflife information found in product description"
+    )
+    assert lifecycle.text == "2031-07-31", (
+        f"Expected end of life 2031-07-31, but got {lifecycle.text}"
+    )
 
 
 @pytest.mark.skipif(