diff --git a/micromamba/setup.cfg b/micromamba/setup.cfg
new file mode 100644
index 0000000000..7fdf37fb20
--- /dev/null
+++ b/micromamba/setup.cfg
@@ -0,0 +1,2 @@
+[pycodestyle]
+ignore = E5,W1,W2,W3,W5
diff --git a/micromamba/tests/conftest.py b/micromamba/tests/conftest.py
index f7246cfc64..1552a60cbb 100644
--- a/micromamba/tests/conftest.py
+++ b/micromamba/tests/conftest.py
@@ -1,12 +1,32 @@
+import copy
 import os
 import pathlib
 import platform
-from typing import Generator
+from typing import Any, Generator, Mapping
 
 import pytest
 
 from . import helpers
 
+####################
+#  Config options  #
+####################
+
+
+def pytest_addoption(parser):
+    """Add pkgs-dir command line argument to pytest."""
+    parser.addoption(
+        "--mamba-pkgs-dir",
+        action="store",
+        default=None,
+        help="Package cache to reuse between tests",
+    )
+
+
+##################
+#  Test fixture  #
+##################
+
 
 @pytest.fixture
 def tmp_home(tmp_path: pathlib.Path) -> Generator[pathlib.Path, None, None]:
@@ -31,11 +51,15 @@ def tmp_home(tmp_path: pathlib.Path) -> Generator[pathlib.Path, None, None]:
 
 
 @pytest.fixture(scope="session")
-def tmp_pkgs_dirs(tmp_path_factory: pytest.TempPathFactory) -> pathlib.Path:
+def tmp_pkgs_dirs(tmp_path_factory: pytest.TempPathFactory, request) -> pathlib.Path:
     """A common package cache for mamba downloads.
 
     The directory is not used automatically when calling this fixture.
     """
+    if (p := request.config.getoption("--mamba-pkgs-dir")) is not None:
+        p = pathlib.Path(p)
+        p.mkdir(parents=True, exist_ok=True)
+        return p
     return tmp_path_factory.mktemp("pkgs_dirs")
 
 
@@ -45,19 +69,29 @@ def shared_pkgs_dirs(request) -> bool:
     return request.param
 
 
+@pytest.fixture
+def tmp_environ() -> Generator[Mapping[str, Any], None, None]:
+    """Save and restore environment variables.
+
+    This is used for tests that need to modify ``os.environ``
+    """
+    old_environ = copy.deepcopy(os.environ)
+    yield old_environ
+    os.environ.clear()
+    os.environ.update(old_environ)
+
+
 @pytest.fixture
 def tmp_clean_env(
-    tmp_pkgs_dirs: pathlib.Path, shared_pkgs_dirs: bool
+    tmp_pkgs_dirs: pathlib.Path, shared_pkgs_dirs: bool, tmp_environ: None
 ) -> Generator[None, None, None]:
     """Remove all Conda/Mamba activation artifacts from environment."""
-    saved_environ = {}
     for k, v in os.environ.items():
         if k.startswith(("CONDA", "_CONDA", "MAMBA", "_MAMBA")):
-            saved_environ[k] = v
             del os.environ[k]
 
     def keep_in_path(
-        p: str, prefix: str | None = saved_environ.get("CONDA_PREFIX")
+        p: str, prefix: str | None = tmp_environ.get("CONDA_PREFIX")
     ) -> bool:
         if "condabin" in p:
             return False
@@ -77,8 +111,6 @@ def keep_in_path(
 
     yield None
 
-    os.environ.update(saved_environ)
-
 
 @pytest.fixture(params=[helpers.random_string, "long_prefix_" * 20])
 def tmp_env_name(request) -> str:
@@ -93,15 +125,11 @@ def tmp_root_prefix(
     tmp_path: pathlib.Path, tmp_clean_env: None
 ) -> Generator[pathlib.Path, None, None]:
     """Change the micromamba root directory to a tmp folder for the duration of a test."""
-    old_root_prefix = os.environ.get("MAMBA_ROOT_PREFIX")
     new_root_prefix = tmp_path / "mamba"
     new_root_prefix.mkdir(parents=True, exist_ok=True)
     os.environ["MAMBA_ROOT_PREFIX"] = str(new_root_prefix)
     yield new_root_prefix
-    if old_root_prefix is not None:
-        os.environ["MAMBA_ROOT_PREFIX"] = old_root_prefix
-    else:
-        del os.environ["MAMBA_ROOT_PREFIX"]
+    # os.environ restored by tmp_clean_env and tmp_environ
 
 
 @pytest.fixture
@@ -110,20 +138,12 @@ def tmp_empty_env(
 ) -> Generator[pathlib.Path, None, None]:
     """An empty envirnment created under a temporary root prefix."""
     helpers.create("-n", tmp_env_name, no_dry_run=True)
-    yield tmp_root_prefix
+    yield tmp_root_prefix / "envs" / tmp_env_name
 
 
 @pytest.fixture
-def tmp_prefix(
-    tmp_root_prefix: pathlib.Path, tmp_env_name: str
-) -> Generator[pathlib.Path, None, None]:
+def tmp_prefix(tmp_empty_env: pathlib.Path) -> Generator[pathlib.Path, None, None]:
     """Change the conda prefix to a tmp folder for the duration of a test."""
-    old_prefix = os.environ.get("CONDA_PREFIX")
-    new_prefix = tmp_root_prefix / "envs" / tmp_env_name
-    new_prefix.mkdir(parents=True, exist_ok=True)
-    os.environ["CONDA_PREFIX"] = str(new_prefix)
-    yield new_prefix
-    if old_prefix is not None:
-        os.environ["CONDA_PREFIX"] = old_prefix
-    else:
-        del os.environ["CONDA_PREFIX"]
+    os.environ["CONDA_PREFIX"] = str(tmp_empty_env)
+    yield tmp_empty_env
+    # os.environ restored by tmp_environ through tmp_root_prefix
diff --git a/micromamba/tests/helpers.py b/micromamba/tests/helpers.py
index b9b81502a8..daefe9e114 100644
--- a/micromamba/tests/helpers.py
+++ b/micromamba/tests/helpers.py
@@ -166,7 +166,7 @@ def create(
     create_cmd="create",
 ):
     umamba = get_umamba()
-    cmd = [umamba] + create_cmd.split() + [arg for arg in args if arg]
+    cmd = [umamba] + create_cmd.split() + [str(arg) for arg in args if arg]
 
     if "--print-config-only" in args:
         cmd += ["--debug"]
@@ -270,7 +270,7 @@ def update(*args, default_channel=True, no_rc=True, no_dry_run=False):
 
 def run_env(*args, f=None):
     umamba = get_umamba()
-    cmd = [umamba, "env"] + [arg for arg in args if arg]
+    cmd = [umamba, "env"] + [str(arg) for arg in args if arg]
 
     res = subprocess_run(*cmd)
 
@@ -284,7 +284,7 @@ def run_env(*args, f=None):
 
 def umamba_list(*args):
     umamba = get_umamba()
-    cmd = [umamba, "list"] + [arg for arg in args if arg]
+    cmd = [umamba, "list"] +
[str(arg) for arg in args if arg] res = subprocess_run(*cmd) if "--json" in args: @@ -297,7 +297,7 @@ def umamba_list(*args): def umamba_run(*args, **kwargs): umamba = get_umamba() - cmd = [umamba, "run"] + [arg for arg in args if arg] + cmd = [umamba, "run"] + [str(arg) for arg in args if arg] res = subprocess_run(*cmd, **kwargs) if "--json" in args: @@ -310,7 +310,7 @@ def umamba_run(*args, **kwargs): def umamba_repoquery(*args, no_rc=True): umamba = get_umamba() - cmd = [umamba, "repoquery"] + [arg for arg in args if arg] + cmd = [umamba, "repoquery"] + [str(arg) for arg in args if arg] if no_rc: cmd += ["--no-rc"] diff --git a/micromamba/tests/test_create.py b/micromamba/tests/test_create.py index 79af86846c..b0c6e77dea 100644 --- a/micromamba/tests/test_create.py +++ b/micromamba/tests/test_create.py @@ -1,821 +1,830 @@ -import json import os import platform -import random import shutil -import string import subprocess from pathlib import Path import pytest import yaml -from .helpers import * +from . import helpers -source_dir_path = os.path.dirname(os.path.realpath(__file__)) +__this_dir__ = Path(__file__).parent.resolve() -this_source_file_dir_path = Path(__file__).parent.resolve() +env_file_requires_pip_install_path = __this_dir__ / "env-requires-pip-install.yaml" -test_env_requires_pip_install_path = os.path.join( - this_source_file_dir_path, "env-requires-pip-install.yaml" -) -test_env_requires_pip_install_path_with_whitespaces = os.path.join( - this_source_file_dir_path, "env-requires-pip-install-with-spaces.yaml" +env_file_requires_pip_install_path_with_whitespaces = ( + __this_dir__ / "env-requires-pip-install-with-spaces.yaml" ) -test_envs = [ - test_env_requires_pip_install_path, - test_env_requires_pip_install_path_with_whitespaces, +env_files = [ + env_file_requires_pip_install_path, + env_file_requires_pip_install_path_with_whitespaces, ] +lockfile_path: Path = __this_dir__ / "test_env-lock.yaml" -class TestCreate: - current_root_prefix = os.environ["MAMBA_ROOT_PREFIX"] - current_prefix = os.environ["CONDA_PREFIX"] - - env_name = random_string() - root_prefix = os.path.expanduser(os.path.join("~", "tmproot" + random_string())) - prefix = os.path.join(root_prefix, "envs", env_name) - other_prefix = os.path.expanduser(os.path.join("~", "tmproot" + random_string())) - spec_files_location = os.path.expanduser( - os.path.join("~", "mamba_spec_files_test_" + random_string()) +def check_create_result(res, root_prefix, target_prefix): + assert res["root_prefix"] == str(root_prefix) + assert res["target_prefix"] == str(target_prefix) + assert not res["use_target_prefix_fallback"] + checks = ( + helpers.MAMBA_ALLOW_EXISTING_PREFIX + | helpers.MAMBA_NOT_ALLOW_MISSING_PREFIX + | helpers.MAMBA_ALLOW_NOT_ENV_PREFIX + | helpers.MAMBA_NOT_EXPECT_EXISTING_PREFIX ) + assert res["target_prefix_checks"] == checks + + +@pytest.mark.parametrize("shared_pkgs_dirs", [True], indirect=True) +@pytest.mark.parametrize( + "source,file_type", + [ + ("cli_only", None), + ("spec_file_only", "classic"), + ("spec_file_only", "explicit"), + ("spec_file_only", "yaml"), + ("both", "classic"), + ("both", "explicit"), + ("both", "yaml"), + ], +) +@pytest.mark.parametrize("create_cmd", ["create", "env create"]) +def test_specs(tmp_home, tmp_root_prefix, tmp_path, source, file_type, create_cmd): + env_prefix = tmp_path / "myenv" + + cmd = ["-p", env_prefix] + specs = [] + + if source in ("cli_only", "both"): + specs = ["xframe", "xtl"] + cmd += specs + + if source in ("spec_file_only", "both"): + spec_file = 
str(tmp_path / "env") + + if file_type == "classic": + file_content = ["xtensor >0.20", "xsimd"] + specs += file_content + elif file_type == "explicit": + explicit_specs = [ + "https://conda.anaconda.org/conda-forge/linux-64/xtensor-0.21.5-hc9558a2_0.tar.bz2#d330e02e5ed58330638a24601b7e4887", + "https://conda.anaconda.org/conda-forge/linux-64/xsimd-7.4.8-hc9558a2_0.tar.bz2#32d5b7ad7d6511f1faacf87e53a63e5f", + ] + file_content = ["@EXPLICIT"] + explicit_specs + specs = explicit_specs + elif file_type == "yaml": + spec_file += ".yaml" + file_content = ["dependencies:", " - xtensor >0.20", " - xsimd"] + specs += ["xtensor >0.20", "xsimd"] + else: + raise RuntimeError("unhandled file type : ", file_type) - test_lockfile_path = os.path.realpath( - os.path.join(source_dir_path, "test_env-lock.yaml") - ) + with open(spec_file, "w") as f: + f.write("\n".join(file_content)) - def test_env_lockfile_step_path(step_number): - return os.path.join( - source_dir_path, f"envlockfile-check-step-{step_number}-lock.yaml" - ) + cmd += ["-f", spec_file] - @classmethod - def setup_class(cls): - assert os.path.exists(TestCreate.test_lockfile_path) - os.environ["MAMBA_ROOT_PREFIX"] = TestCreate.root_prefix - os.makedirs(TestCreate.spec_files_location, exist_ok=True) - - @classmethod - def teardown_class(cls): - os.environ["MAMBA_ROOT_PREFIX"] = TestCreate.current_root_prefix - os.environ["CONDA_PREFIX"] = TestCreate.current_prefix - - if Path(TestCreate.spec_files_location).exists(): - shutil.rmtree(TestCreate.spec_files_location) - - @classmethod - def teardown(cls): - os.environ["MAMBA_ROOT_PREFIX"] = TestCreate.root_prefix - os.environ["CONDA_PREFIX"] = TestCreate.prefix - - for v in ("CONDA_CHANNELS", "MAMBA_TARGET_PREFIX"): - if v in os.environ: - os.environ.pop(v) - - if Path(TestCreate.root_prefix).exists(): - shutil.rmtree(TestCreate.root_prefix) - if Path(TestCreate.other_prefix).exists(): - shutil.rmtree(TestCreate.other_prefix) - - @classmethod - def config_tests(cls, res, root_prefix, target_prefix): - assert res["root_prefix"] == root_prefix - assert res["target_prefix"] == target_prefix - assert not res["use_target_prefix_fallback"] - checks = ( - MAMBA_ALLOW_EXISTING_PREFIX - | MAMBA_NOT_ALLOW_MISSING_PREFIX - | MAMBA_ALLOW_NOT_ENV_PREFIX - | MAMBA_NOT_EXPECT_EXISTING_PREFIX - ) - assert res["target_prefix_checks"] == checks - - @pytest.mark.parametrize( - "source,file_type", - [ - ("cli_only", None), - ("spec_file_only", "classic"), - ("spec_file_only", "explicit"), - ("spec_file_only", "yaml"), - ("both", "classic"), - ("both", "explicit"), - ("both", "yaml"), - ], - ) - @pytest.mark.parametrize("create_cmd", ["create", "env create"]) - def test_specs(self, source, file_type, existing_cache, create_cmd): - cmd = ["-p", TestCreate.prefix] - specs = [] - - if source in ("cli_only", "both"): - specs = ["xframe", "xtl"] - cmd += specs - - if source in ("spec_file_only", "both"): - f_name = random_string() - spec_file = os.path.join(TestCreate.spec_files_location, f_name) - - if file_type == "classic": - file_content = ["xtensor >0.20", "xsimd"] - specs += file_content - elif file_type == "explicit": - explicit_specs = [ - "https://conda.anaconda.org/conda-forge/linux-64/xtensor-0.21.5-hc9558a2_0.tar.bz2#d330e02e5ed58330638a24601b7e4887", - "https://conda.anaconda.org/conda-forge/linux-64/xsimd-7.4.8-hc9558a2_0.tar.bz2#32d5b7ad7d6511f1faacf87e53a63e5f", - ] - file_content = ["@EXPLICIT"] + explicit_specs - specs = explicit_specs - elif file_type == "yaml": - spec_file += ".yaml" - file_content = 
["dependencies:", " - xtensor >0.20", " - xsimd"] - specs += ["xtensor >0.20", "xsimd"] - else: - raise RuntimeError("unhandled file type : ", file_type) - - os.makedirs(TestCreate.root_prefix, exist_ok=True) - with open(spec_file, "w") as f: - f.write("\n".join(file_content)) - - cmd += ["-f", spec_file] - - res = create(*cmd, "--print-config-only", create_cmd=create_cmd) - - TestCreate.config_tests(res, TestCreate.root_prefix, TestCreate.prefix) - assert res["env_name"] == "" - assert res["specs"] == specs - - json_res = create(*cmd, "--json", create_cmd=create_cmd) - assert json_res["success"] == True - - def test_lockfile(self): - cmd_prefix = ["-p", TestCreate.prefix] - f_name = random_string() - spec_file = os.path.join(TestCreate.spec_files_location, f_name) + "-lock.yaml" - shutil.copyfile(TestCreate.test_lockfile_path, spec_file) - assert os.path.exists(spec_file) - - res = create(*cmd_prefix, "-f", spec_file, "--json") - assert res["success"] == True - - packages = umamba_list(*cmd_prefix, "--json") - assert any( - package["name"] == "zlib" and package["version"] == "1.2.11" - for package in packages - ) + res = helpers.create(*cmd, "--print-config-only", create_cmd=create_cmd) - def test_lockfile_online(self): - cmd_prefix = ["-p", TestCreate.prefix] - spec_file = "https://raw.githubusercontent.com/mamba-org/mamba/main/micromamba/tests/test_env-lock.yaml" + check_create_result(res, tmp_root_prefix, env_prefix) + assert res["env_name"] == "" + assert res["specs"] == specs - res = create(*cmd_prefix, "-f", spec_file, "--json") - assert res["success"] == True + json_res = helpers.create(*cmd, "--json", create_cmd=create_cmd) + assert json_res["success"] - packages = umamba_list(*cmd_prefix, "--json") - assert any( - package["name"] == "zlib" and package["version"] == "1.2.11" - for package in packages - ) - def test_env_lockfile_different_install_after_create(self): - cmd_prefix = ["-p", TestCreate.prefix] - create_spec_file = ( - os.path.join(TestCreate.spec_files_location, "env-create") + "-lock.yaml" - ) - shutil.copyfile(TestCreate.test_env_lockfile_step_path(1), create_spec_file) - assert os.path.exists(create_spec_file) +@pytest.mark.parametrize("shared_pkgs_dirs", [True], indirect=True) +def test_lockfile(tmp_home, tmp_root_prefix, tmp_path): + env_prefix = tmp_path / "myenv" + spec_file = tmp_path / "env-lock.yaml" - install_spec_file = ( - os.path.join(TestCreate.spec_files_location, "env-install") + "-lock.yaml" - ) - shutil.copyfile(TestCreate.test_env_lockfile_step_path(2), install_spec_file) - assert os.path.exists(install_spec_file) - - res = create(*cmd_prefix, "-f", create_spec_file, "-y", "--json") - assert res["success"] == True - - install(*cmd_prefix, "-f", install_spec_file, "-y", "--json") # Must not crash - - @pytest.mark.parametrize("root_prefix", (None, "env_var", "cli")) - @pytest.mark.parametrize("target_is_root", (False, True)) - @pytest.mark.parametrize("cli_prefix", (False, True)) - @pytest.mark.parametrize("cli_env_name", (False, True)) - @pytest.mark.parametrize("yaml_name", (False, True, "prefix")) - @pytest.mark.parametrize("env_var", (False, True)) - @pytest.mark.parametrize("fallback", (False, True)) - @pytest.mark.parametrize( - "similar_non_canonical,non_canonical_position", - ((False, None), (True, "append"), (True, "prepend")), - ) - def test_target_prefix( - self, - root_prefix, - target_is_root, - cli_prefix, - cli_env_name, - yaml_name, - env_var, - fallback, - similar_non_canonical, - non_canonical_position, - existing_cache, - ): - cmd = 
[] + shutil.copyfile(lockfile_path, spec_file) - if root_prefix in (None, "cli"): - os.environ["MAMBA_DEFAULT_ROOT_PREFIX"] = os.environ.pop( - "MAMBA_ROOT_PREFIX" - ) + res = helpers.create("-p", env_prefix, "-f", spec_file, "--json") + assert res["success"] - if root_prefix == "cli": - cmd += ["-r", TestCreate.root_prefix] + packages = helpers.umamba_list("-p", env_prefix, "--json") + assert any( + package["name"] == "zlib" and package["version"] == "1.2.11" + for package in packages + ) - r = TestCreate.root_prefix - if target_is_root: - p = r - n = "base" - else: - p = TestCreate.prefix - n = TestCreate.env_name - - expected_p = os.path.realpath(p) - if similar_non_canonical: - if non_canonical_position == "append": - p = os.path.join(p, ".") - else: - home = os.path.expanduser("~") - p = p.replace(home, os.path.join(home, ".")) - - if cli_prefix: - cmd += ["-p", p] - - if cli_env_name: - cmd += ["-n", n] - - if yaml_name: - f_name = random_string() + ".yaml" - spec_file = os.path.join(TestCreate.spec_files_location, f_name) - - if yaml_name == "prefix": - yaml_n = p - else: - yaml_n = "yaml_name" - if not (cli_prefix or cli_env_name): - expected_p = os.path.join(TestCreate.root_prefix, "envs", yaml_n) - - file_content = [ - f"name: {yaml_n}", - "dependencies: [xtensor]", - ] - with open(spec_file, "w") as f: - f.write("\n".join(file_content)) - cmd += ["-f", spec_file] +@pytest.mark.parametrize("shared_pkgs_dirs", [True], indirect=True) +def test_lockfile_online(tmp_home, tmp_root_prefix, tmp_path): + env_prefix = tmp_path / "myenv" + spec_file = "https://raw.githubusercontent.com/mamba-org/mamba/main/micromamba/tests/test_env-lock.yaml" - if env_var: - os.environ["MAMBA_TARGET_PREFIX"] = p + res = helpers.create("-p", env_prefix, "-f", spec_file, "--json") + assert res["success"] - if not fallback: - os.environ.pop("CONDA_PREFIX") - else: - os.environ["CONDA_PREFIX"] = p - - if ( - (cli_prefix and cli_env_name) - or (yaml_name == "prefix") - or not (cli_prefix or cli_env_name or yaml_name or env_var) - ): - with pytest.raises(subprocess.CalledProcessError): - create(*cmd, "--print-config-only") - else: - res = create(*cmd, "--print-config-only") - TestCreate.config_tests(res, root_prefix=r, target_prefix=expected_p) - - @pytest.mark.parametrize("cli", (False, True)) - @pytest.mark.parametrize("yaml", (False, True)) - @pytest.mark.parametrize("env_var", (False, True)) - @pytest.mark.parametrize("rc_file", (False, True)) - def test_channels(self, cli, yaml, env_var, rc_file, existing_cache): - cmd = ["-p", TestCreate.prefix] - expected_channels = [] - - if cli: - cmd += ["-c", "cli"] - expected_channels += ["cli"] - - if yaml: - f_name = random_string() + ".yaml" - spec_file = os.path.join(TestCreate.spec_files_location, f_name) - - file_content = [ - "channels: [yaml]", - "dependencies: [xtensor]", - ] + packages = helpers.umamba_list("-p", env_prefix, "--json") + assert any( + package["name"] == "zlib" and package["version"] == "1.2.11" + for package in packages + ) - with open(spec_file, "w") as f: - f.write("\n".join(file_content)) - cmd += ["-f", spec_file] - expected_channels += ["yaml"] - if env_var: - os.environ["CONDA_CHANNELS"] = "env_var" - expected_channels += ["env_var"] +@pytest.mark.parametrize("shared_pkgs_dirs", [True], indirect=True) +def test_env_lockfile_different_install_after_create( + tmp_home, tmp_root_prefix, tmp_path +): + env_prefix = tmp_path / "myenv" + create_spec_file = tmp_path / "env-create-lock.yaml" + install_spec_file = tmp_path / 
"env-install-lock.yaml" - if rc_file: - f_name = random_string() + ".yaml" - rc_file = os.path.join(TestCreate.spec_files_location, f_name) + shutil.copyfile( + __this_dir__ / "envlockfile-check-step-1-lock.yaml", create_spec_file + ) + shutil.copyfile( + __this_dir__ / "envlockfile-check-step-2-lock.yaml", install_spec_file + ) - file_content = ["channels: [rc]"] - with open(rc_file, "w") as f: - f.write("\n".join(file_content)) + res = helpers.create("-p", env_prefix, "-f", create_spec_file, "-y", "--json") + assert res["success"] - cmd += ["--rc-file", rc_file] - expected_channels += ["rc"] + # Must not crash + helpers.install("-p", env_prefix, "-f", install_spec_file, "-y", "--json") - res = create( - *cmd, "--print-config-only", no_rc=not rc_file, default_channel=False - ) - TestCreate.config_tests(res, TestCreate.root_prefix, TestCreate.prefix) - if expected_channels: - assert res["channels"] == expected_channels - else: - assert res["channels"] is None - - @pytest.mark.parametrize("type", ("yaml", "classic", "explicit")) - def test_multiple_spec_files(self, type, existing_cache): - cmd = ["-p", TestCreate.prefix] - specs = ["xtensor", "xsimd"] - explicit_specs = [ - "https://conda.anaconda.org/conda-forge/linux-64/xtensor-0.21.5-hc9558a2_0.tar.bz2#d330e02e5ed58330638a24601b7e4887", - "https://conda.anaconda.org/conda-forge/linux-64/xsimd-7.4.8-hc9558a2_0.tar.bz2#32d5b7ad7d6511f1faacf87e53a63e5f", - ] - for i in range(2): - f_name = random_string() - file = os.path.join(TestCreate.spec_files_location, f_name) +@pytest.mark.parametrize("shared_pkgs_dirs", [True], indirect=True) +@pytest.mark.parametrize("root_prefix_type", (None, "env_var", "cli")) +@pytest.mark.parametrize("target_is_root", (False, True)) +@pytest.mark.parametrize("cli_prefix", (False, True)) +@pytest.mark.parametrize("cli_env_name", (False, True)) +@pytest.mark.parametrize("yaml_name", (False, True, "prefix")) +@pytest.mark.parametrize("env_var", (False, True)) +@pytest.mark.parametrize("fallback", (False, True)) +@pytest.mark.parametrize( + "similar_non_canonical,non_canonical_position", + ((False, None), (True, "append"), (True, "prepend")), +) +def test_target_prefix( + tmp_home, + tmp_root_prefix, + tmp_path, + root_prefix_type, + target_is_root, + cli_prefix, + cli_env_name, + yaml_name, + env_var, + fallback, + similar_non_canonical, + non_canonical_position, +): + cmd = [] + + if root_prefix_type is None: + root_prefix = Path(os.environ["MAMBA_ROOT_PREFIX"]) + elif root_prefix_type == "cli": + root_prefix = tmp_path / "myroot" + cmd += ["-r", root_prefix] + else: + root_prefix = Path(os.environ["MAMBA_ROOT_PREFIX"]) + + env_prefix = tmp_path / "myenv" + + if target_is_root: + p = root_prefix + n = "base" + else: + p = env_prefix + n = "someenv" + + expected_p = p.resolve() + if cli_env_name and not target_is_root: + expected_p = root_prefix / "envs" / n + + if similar_non_canonical: + if non_canonical_position == "append": + p = p / "." + else: + p = p.parent / "." 
/ p.name - if type == "yaml": - file += ".yaml" - file_content = [f"dependencies: [{specs[i]}]"] - elif type == "classic": - file_content = [specs[i]] - expected_specs = specs - else: # explicit - file_content = ["@EXPLICIT", explicit_specs[i]] + if cli_prefix: + cmd += ["-p", p] - with open(file, "w") as f: - f.write("\n".join(file_content)) + if cli_env_name: + cmd += ["-n", n] - cmd += ["-f", file] + if yaml_name: + spec_file = tmp_path / "env.yaml" - if type == "yaml": - with pytest.raises(subprocess.CalledProcessError): - create(*cmd, "--print-config-only") + if yaml_name == "prefix": + yaml_n = str(p) else: - res = create(*cmd, "--print-config-only") - if type == "classic": - assert res["specs"] == specs - else: # explicit - assert res["specs"] == [explicit_specs[0]] - - @pytest.mark.skipif( - dry_run_tests is DryRun.ULTRA_DRY, reason="Running only ultra-dry tests" - ) - @pytest.mark.parametrize( - "already_exists, is_conda_env", ((False, False), (True, False), (True, True)) - ) - @pytest.mark.parametrize("has_specs", (False, True)) - def test_create_base(self, already_exists, is_conda_env, has_specs, existing_cache): - if already_exists: - if is_conda_env: - os.makedirs( - os.path.join(TestCreate.root_prefix, "conda-meta"), exist_ok=False - ) - else: - os.makedirs(TestCreate.root_prefix) - - cmd = ["-n", "base"] - if has_specs: - cmd += ["xtensor"] - - if already_exists: - with pytest.raises(subprocess.CalledProcessError): - create(*cmd) - else: - create(*cmd) - assert Path(os.path.join(TestCreate.root_prefix, "conda-meta")).exists() + yaml_n = "yaml_name" + if not (cli_prefix or cli_env_name): + expected_p = root_prefix / "envs" / yaml_n - @pytest.mark.skipif( - dry_run_tests is DryRun.ULTRA_DRY, reason="Running only ultra-dry tests" - ) - @pytest.mark.parametrize("outside_root_prefix", (False, True)) - def test_classic_specs(self, outside_root_prefix, existing_cache): - if outside_root_prefix: - p = TestCreate.other_prefix - else: - p = TestCreate.prefix + file_content = [ + f"name: {yaml_n}", + "dependencies: [xtensor]", + ] + with open(spec_file, "w") as f: + f.write("\n".join(file_content)) + cmd += ["-f", spec_file] + + if env_var: + os.environ["MAMBA_TARGET_PREFIX"] = str(p) - res = create("-p", p, "xtensor", "--json") + if not fallback: + os.environ.pop("CONDA_PREFIX", None) + else: + os.environ["CONDA_PREFIX"] = str(p) - assert res["success"] - assert res["dry_run"] == (dry_run_tests == DryRun.DRY) + if ( + (cli_prefix and cli_env_name) + or (yaml_name == "prefix") + or not (cli_prefix or cli_env_name or yaml_name or env_var) + ): + with pytest.raises(subprocess.CalledProcessError): + helpers.create(*cmd, "--print-config-only") + else: + res = helpers.create(*cmd, "--print-config-only") + check_create_result(res, root_prefix=root_prefix, target_prefix=expected_p) + + +@pytest.mark.parametrize("shared_pkgs_dirs", [True], indirect=True) +@pytest.mark.parametrize("cli", (False, True)) +@pytest.mark.parametrize("yaml", (False, True)) +@pytest.mark.parametrize("env_var", (False, True)) +@pytest.mark.parametrize("rc_file", (False, True)) +def test_channels(tmp_home, tmp_root_prefix, tmp_path, cli, yaml, env_var, rc_file): + env_prefix = tmp_path / "myenv" + spec_file = tmp_path / "env.yaml" + rc_file = tmp_path / "rc.yaml" + + cmd = ["-p", env_prefix] + expected_channels = [] + + if cli: + cmd += ["-c", "cli"] + expected_channels += ["cli"] + + if yaml: + file_content = [ + "channels: [yaml]", + "dependencies: [xtensor]", + ] - keys = {"success", "prefix", "actions", "dry_run"} - 
assert keys.issubset(set(res.keys())) + with open(spec_file, "w") as f: + f.write("\n".join(file_content)) + cmd += ["-f", spec_file] + expected_channels += ["yaml"] - action_keys = {"LINK", "PREFIX"} - assert action_keys.issubset(set(res["actions"].keys())) + if env_var: + os.environ["CONDA_CHANNELS"] = "env_var" + expected_channels += ["env_var"] - packages = {pkg["name"] for pkg in res["actions"]["LINK"]} - expected_packages = {"xtensor", "xtl"} - assert expected_packages.issubset(packages) + if rc_file: + file_content = ["channels: [rc]"] + with open(rc_file, "w") as f: + f.write("\n".join(file_content)) - if dry_run_tests == DryRun.OFF: - pkg_name = get_concrete_pkg(res, "xtensor") - cached_file = existing_cache / pkg_name / xtensor_hpp - assert cached_file.exists() + cmd += ["--rc-file", rc_file] + expected_channels += ["rc"] - @pytest.mark.skipif( - dry_run_tests is DryRun.ULTRA_DRY, reason="Running only ultra-dry tests" + res = helpers.create( + *cmd, "--print-config-only", no_rc=not rc_file, default_channel=False ) - @pytest.mark.parametrize("valid", [False, True]) - def test_explicit_specs(self, valid, existing_cache): - spec_file_content = [ - "@EXPLICIT", - "https://conda.anaconda.org/conda-forge/linux-64/xtensor-0.21.5-hc9558a2_0.tar.bz2#d330e02e5ed58330638a24601b7e4887", - ] - if not valid: - spec_file_content += ["https://conda.anaconda.org/conda-forge/linux-64/xtl"] + check_create_result(res, tmp_root_prefix, env_prefix) + if expected_channels: + assert res["channels"] == expected_channels + else: + assert res["channels"] is None + + +@pytest.mark.parametrize("shared_pkgs_dirs", [True], indirect=True) +@pytest.mark.parametrize("type", ("yaml", "classic", "explicit")) +def test_multiple_spec_files(tmp_home, tmp_root_prefix, tmp_path, type): + env_prefix = tmp_path / "myenv" + + cmd = ["-p", env_prefix] + specs = ["xtensor", "xsimd"] + explicit_specs = [ + "https://conda.anaconda.org/conda-forge/linux-64/xtensor-0.21.5-hc9558a2_0.tar.bz2#d330e02e5ed58330638a24601b7e4887", + "https://conda.anaconda.org/conda-forge/linux-64/xsimd-7.4.8-hc9558a2_0.tar.bz2#32d5b7ad7d6511f1faacf87e53a63e5f", + ] + + for i in range(2): + if type == "yaml": + spec_file = tmp_path / f"env{i}.yaml" + file_content = [f"dependencies: [{specs[i]}]"] + elif type == "classic": + spec_file = tmp_path / f"env{i}.txt" + file_content = [specs[i]] + else: # explicit + spec_file = tmp_path / f"env{i}.txt" + file_content = ["@EXPLICIT", explicit_specs[i]] - spec_file = os.path.join(TestCreate.spec_files_location, "explicit_specs.txt") with open(spec_file, "w") as f: - f.write("\n".join(spec_file_content)) + f.write("\n".join(file_content)) - cmd = ("-p", TestCreate.prefix, "-q", "-f", spec_file) + cmd += ["-f", spec_file] - if valid: - create(*cmd, default_channel=False) + if type == "yaml": + with pytest.raises(subprocess.CalledProcessError): + helpers.create(*cmd, "--print-config-only") + else: + res = helpers.create(*cmd, "--print-config-only") + if type == "classic": + assert res["specs"] == specs + else: # explicit + assert res["specs"] == [explicit_specs[0]] - list_res = umamba_list("-p", TestCreate.prefix, "--json") - assert len(list_res) == 1 - pkg = list_res[0] - assert pkg["name"] == "xtensor" - assert pkg["version"] == "0.21.5" - assert pkg["build_string"] == "hc9558a2_0" - else: - with pytest.raises(subprocess.CalledProcessError): - create(*cmd, default_channel=False) - @pytest.mark.skipif( - dry_run_tests is DryRun.ULTRA_DRY, reason="Running only ultra-dry tests" - ) - 
@pytest.mark.parametrize("prefix_selector", [None, "prefix", "name"]) - @pytest.mark.parametrize("create_cmd", ["create", "env create"]) - def test_create_empty(self, prefix_selector, existing_cache, create_cmd): - if prefix_selector == "name": - cmd = ("-n", TestCreate.env_name, "--json") - elif prefix_selector == "prefix": - cmd = ("-p", TestCreate.prefix, "--json") - else: - with pytest.raises(subprocess.CalledProcessError): - create("--json", create_cmd=create_cmd) - return +@pytest.mark.skipif( + helpers.dry_run_tests is helpers.DryRun.ULTRA_DRY, + reason="Running only ultra-dry tests", +) +@pytest.mark.parametrize("shared_pkgs_dirs", [True], indirect=True) +@pytest.mark.parametrize( + "already_exists, is_conda_env", ((False, False), (True, False), (True, True)) +) +@pytest.mark.parametrize("has_specs", (False, True)) +def test_create_base( + tmp_home, tmp_root_prefix, already_exists, is_conda_env, has_specs +): + if already_exists: + if is_conda_env: + (tmp_root_prefix / "conda-meta").mkdir() + else: + tmp_root_prefix.rmdir() + + cmd = ["-n", "base"] + if has_specs: + cmd += ["xtensor"] + + if already_exists: + with pytest.raises(subprocess.CalledProcessError): + helpers.create(*cmd) + else: + helpers.create(*cmd) + assert (tmp_root_prefix / "conda-meta").exists() + + +@pytest.mark.skipif( + helpers.dry_run_tests is helpers.DryRun.ULTRA_DRY, + reason="Running only ultra-dry tests", +) +@pytest.mark.parametrize("outside_root_prefix", (False, True)) +def test_classic_specs(tmp_home, tmp_root_prefix, tmp_path, outside_root_prefix): + tmp_pkgs_dirs = tmp_path / "cache" + os.environ["CONDA_PKGS_DIRS"] = str(tmp_pkgs_dirs) + if outside_root_prefix: + p = tmp_path / "myenv" + else: + p = tmp_root_prefix / "envs" / "myenv" - res = create(*cmd, create_cmd=create_cmd) + res = helpers.create("-p", p, "xtensor", "--json") - keys = {"success"} - assert keys.issubset(set(res.keys())) - assert res["success"] + assert res["success"] + assert res["dry_run"] == (helpers.dry_run_tests == helpers.DryRun.DRY) - assert Path(os.path.join(TestCreate.prefix, "conda-meta", "history")).exists() + keys = {"success", "prefix", "actions", "dry_run"} + assert keys.issubset(set(res.keys())) - @pytest.mark.skipif( - dry_run_tests is DryRun.ULTRA_DRY, reason="Running only ultra-dry tests" - ) - @pytest.mark.parametrize("relocate_prefix", ["/home/bob/env", "/"]) - def test_create_with_relocate_prefix(self, relocate_prefix, existing_cache): - res = create( - "-p", - TestCreate.prefix, - "--relocate-prefix", - relocate_prefix, - "python=3.11", + action_keys = {"LINK", "PREFIX"} + assert action_keys.issubset(set(res["actions"].keys())) + + packages = {pkg["name"] for pkg in res["actions"]["LINK"]} + expected_packages = {"xtensor", "xtl"} + assert expected_packages.issubset(packages) + + if helpers.dry_run_tests == helpers.DryRun.OFF: + pkg_name = helpers.get_concrete_pkg(res, "xtensor") + cached_file = tmp_pkgs_dirs / pkg_name / helpers.xtensor_hpp + assert cached_file.exists() + + +@pytest.mark.skipif( + helpers.dry_run_tests is helpers.DryRun.ULTRA_DRY, + reason="Running only ultra-dry tests", +) +@pytest.mark.parametrize("shared_pkgs_dirs", [True], indirect=True) +@pytest.mark.parametrize("valid", [False, True]) +def test_explicit_specs(tmp_home, tmp_root_prefix, tmp_path, valid): + spec_file_content = [ + "@EXPLICIT", + "https://conda.anaconda.org/conda-forge/linux-64/xtensor-0.21.5-hc9558a2_0.tar.bz2#d330e02e5ed58330638a24601b7e4887", + ] + if not valid: + spec_file_content += 
["https://conda.anaconda.org/conda-forge/linux-64/xtl"] + + spec_file = tmp_path / "explicit_specs.txt" + with open(spec_file, "w") as f: + f.write("\n".join(spec_file_content)) + + env_prefix = tmp_path / "myenv" + cmd = ("-p", env_prefix, "-q", "-f", spec_file) + + if valid: + helpers.create(*cmd, default_channel=False) + + list_res = helpers.umamba_list("-p", env_prefix, "--json") + assert len(list_res) == 1 + pkg = list_res[0] + assert pkg["name"] == "xtensor" + assert pkg["version"] == "0.21.5" + assert pkg["build_string"] == "hc9558a2_0" + else: + with pytest.raises(subprocess.CalledProcessError): + helpers.create(*cmd, default_channel=False) + + +@pytest.mark.skipif( + helpers.dry_run_tests is helpers.DryRun.ULTRA_DRY, + reason="Running only ultra-dry tests", +) +@pytest.mark.parametrize("shared_pkgs_dirs", [True], indirect=True) +@pytest.mark.parametrize("prefix_selector", [None, "prefix", "name"]) +@pytest.mark.parametrize("create_cmd", ["create", "env create"]) +def test_create_empty(tmp_home, tmp_root_prefix, tmp_path, prefix_selector, create_cmd): + if prefix_selector == "name": + cmd = ("-n", "myenv", "--json") + effective_prefix = tmp_root_prefix / "envs" / "myenv" + elif prefix_selector == "prefix": + effective_prefix = tmp_path / "some-prefix" + cmd = ("-p", effective_prefix, "--json") + else: + with pytest.raises(subprocess.CalledProcessError): + helpers.create("--json", create_cmd=create_cmd) + return + + res = helpers.create(*cmd, create_cmd=create_cmd) + + keys = {"success"} + assert keys.issubset(set(res.keys())) + assert res["success"] + + assert (effective_prefix / "conda-meta" / "history").exists() + + +@pytest.mark.skipif( + helpers.dry_run_tests is helpers.DryRun.ULTRA_DRY, + reason="Running only ultra-dry tests", +) +@pytest.mark.parametrize("shared_pkgs_dirs", [True], indirect=True) +@pytest.mark.parametrize("source", ["cli", "env_var", "rc_file"]) +def test_always_yes(tmp_home, tmp_root_prefix, tmp_path, source): + env_name = "myenv" + helpers.create("-n", env_name, "xtensor", no_dry_run=True) + + if source == "cli": + res = helpers.create("-n", env_name, "xtensor", "--json", always_yes=True) + elif source == "env_var": + try: + os.environ["MAMBA_ALWAYS_YES"] = "true" + res = helpers.create("-n", env_name, "xtensor", "--json", always_yes=False) + finally: + os.environ.pop("MAMBA_ALWAYS_YES") + else: # rc_file + rc_file = tmp_path / "config.yaml" + with open(rc_file, "w") as f: + f.write("always_yes: true") + res = helpers.create( + "-n", + env_name, + "xtensor", + f"--rc-file={rc_file}", "--json", - no_dry_run=True, + always_yes=False, + no_rc=False, ) - assert res["success"] - if platform.system() != "Windows": - with open(Path(TestCreate.prefix) / "bin" / "2to3") as f: - firstline = f.readline() - assert firstline == f"#!{relocate_prefix}/bin/python3.11\n" - - @pytest.mark.skipif( - dry_run_tests is DryRun.ULTRA_DRY, reason="Running only ultra-dry tests" - ) - @pytest.mark.parametrize("source", ["cli", "env_var", "rc_file"]) - def test_always_yes(self, source, existing_cache): - create("-n", TestCreate.env_name, "xtensor", no_dry_run=True) - - if source == "cli": - res = create( - "-n", TestCreate.env_name, "xtensor", "--json", always_yes=True - ) - elif source == "env_var": - try: - os.environ["MAMBA_ALWAYS_YES"] = "true" - res = create( - "-n", TestCreate.env_name, "xtensor", "--json", always_yes=False - ) - finally: - os.environ.pop("MAMBA_ALWAYS_YES") - else: # rc_file - rc_file = os.path.join( - TestCreate.spec_files_location, random_string() + ".yaml" - 
) - with open(rc_file, "w") as f: - f.write("always_yes: true") - res = create( - "-n", - TestCreate.env_name, - "xtensor", - f"--rc-file={rc_file}", - "--json", - always_yes=False, - no_rc=False, - ) - - assert res["success"] - assert res["dry_run"] == (dry_run_tests == DryRun.DRY) - - @pytest.mark.skipif( - dry_run_tests is DryRun.ULTRA_DRY, reason="Running only ultra-dry tests" - ) - @pytest.mark.parametrize( - "alias", - [ - None, - "https://conda.anaconda.org/", - "https://repo.mamba.pm/", - "https://repo.mamba.pm", - ], + + assert res["success"] + assert res["dry_run"] == (helpers.dry_run_tests == helpers.DryRun.DRY) + + +@pytest.mark.skipif( + helpers.dry_run_tests is helpers.DryRun.ULTRA_DRY, + reason="Running only ultra-dry tests", +) +@pytest.mark.parametrize("shared_pkgs_dirs", [True], indirect=True) +@pytest.mark.parametrize("relocate_prefix", ["/home/bob/env", "/"]) +def test_create_with_relocate_prefix( + tmp_home, tmp_root_prefix, tmp_path, relocate_prefix +): + env_prefix = tmp_path / "myenv" + res = helpers.create( + "-p", + env_prefix, + "--relocate-prefix", + relocate_prefix, + "python=3.11", + "--json", + no_dry_run=True, ) - def test_channel_alias(self, alias, existing_cache): - if alias: - res = create( - "-n", - TestCreate.env_name, - "xtensor", - "--json", - "--channel-alias", - alias, - ) - ca = alias.rstrip("/") - else: - res = create("-n", TestCreate.env_name, "xtensor", "--json") - ca = "https://conda.anaconda.org" + assert res["success"] + if platform.system() != "Windows": + with open(env_prefix / "bin" / "2to3") as f: + firstline = f.readline() + assert firstline == f"#!{relocate_prefix}/bin/python3.11\n" - for l in res["actions"]["LINK"]: - assert l["channel"].startswith(f"{ca}/conda-forge/") - assert l["url"].startswith(f"{ca}/conda-forge/") - def test_spec_with_channel(self, existing_cache): - res = create("-n", TestCreate.env_name, "bokeh::bokeh", "--json", "--dry-run") +@pytest.mark.skipif( + helpers.dry_run_tests is helpers.DryRun.ULTRA_DRY, + reason="Running only ultra-dry tests", +) +@pytest.mark.parametrize("shared_pkgs_dirs", [True], indirect=True) +@pytest.mark.parametrize( + "alias", + [ + None, + "https://conda.anaconda.org/", + "https://repo.mamba.pm/", + "https://repo.mamba.pm", + ], +) +def test_channel_alias(tmp_home, tmp_root_prefix, alias): + env_name = "myenv" + if alias: + res = helpers.create( + "-n", + env_name, + "xtensor", + "--json", + "--channel-alias", + alias, + ) + ca = alias.rstrip("/") + else: + res = helpers.create("-n", env_name, "xtensor", "--json") ca = "https://conda.anaconda.org" - for l in res["actions"]["LINK"]: - if l["name"] == "bokeh": - assert l["channel"].startswith(f"{ca}/bokeh/") - assert l["url"].startswith(f"{ca}/bokeh/") + for link in res["actions"]["LINK"]: + assert link["channel"].startswith(f"{ca}/conda-forge/") + assert link["url"].startswith(f"{ca}/conda-forge/") - f_name = random_string() + ".yaml" - spec_file = os.path.join(TestCreate.spec_files_location, f_name) - contents = [ - "dependencies:", - " - bokeh::bokeh", - " - conda-forge::xtensor 0.22.*", - ] - with open(spec_file, "w") as fs: - fs.write("\n".join(contents)) +@pytest.mark.skipif( + helpers.dry_run_tests is helpers.DryRun.ULTRA_DRY, + reason="Running only ultra-dry tests", +) +@pytest.mark.parametrize("shared_pkgs_dirs", [True], indirect=True) +def test_spec_with_channel(tmp_home, tmp_root_prefix, tmp_path): + env_name = "myenv" + res = helpers.create("-n", env_name, "bokeh::bokeh", "--json", "--dry-run") + ca = "https://conda.anaconda.org" 
+ + for link in res["actions"]["LINK"]: + if link["name"] == "bokeh": + assert link["channel"].startswith(f"{ca}/bokeh/") + assert link["url"].startswith(f"{ca}/bokeh/") + + spec_file = tmp_path / "env.yaml" + contents = [ + "dependencies:", + " - bokeh::bokeh", + " - conda-forge::xtensor 0.22.*", + ] + with open(spec_file, "w") as fs: + fs.write("\n".join(contents)) + + res = helpers.create("-n", env_name, "-f", spec_file, "--json", "--dry-run") + + link_packages = [link["name"] for link in res["actions"]["LINK"]] + assert "bokeh" in link_packages + assert "xtensor" in link_packages + + for link in res["actions"]["LINK"]: + if link["name"] == "bokeh": + assert link["channel"].startswith(f"{ca}/bokeh/") + assert link["url"].startswith(f"{ca}/bokeh/") + + if link["name"] == "xtensor": + assert link["channel"].startswith(f"{ca}/conda-forge/") + assert link["url"].startswith(f"{ca}/conda-forge/") + assert link["version"].startswith("0.22.") + + +@pytest.mark.parametrize("shared_pkgs_dirs", [True], indirect=True) +def test_channel_nodefaults(tmp_home, tmp_root_prefix, tmp_path): + rc_file = tmp_path / "rc.yaml" + content = [ + "channels:", + " - rc", + ] + with open(rc_file, "w") as f: + f.write("\n".join(content)) + + spec_file = tmp_path / "env.yaml" + contents = [ + "channels:", + " - yaml", + " - nodefaults", + "dependencies:", + " - xframe", + ] + with open(spec_file, "w") as f: + f.write("\n".join(contents)) + + res = helpers.create( + "-n", + "myenv", + "-f", + spec_file, + "--print-config-only", + f"--rc-file={rc_file}", + default_channel=False, + no_rc=False, + ) - res = create("-n", TestCreate.env_name, "-f", spec_file, "--json", "--dry-run") + assert res["channels"] == ["yaml"] - link_packages = [l["name"] for l in res["actions"]["LINK"]] - assert "bokeh" in link_packages - assert "xtensor" in link_packages - for l in res["actions"]["LINK"]: - if l["name"] == "bokeh": - assert l["channel"].startswith(f"{ca}/bokeh/") - assert l["url"].startswith(f"{ca}/bokeh/") +@pytest.mark.parametrize("shared_pkgs_dirs", [True], indirect=True) +def test_set_platform(tmp_home, tmp_root_prefix): + env_name = "myenv" + # test a dummy platform/arch + helpers.create("-n", env_name, "--platform", "ptf-128") + rc_file = tmp_root_prefix / "envs" / env_name / ".mambarc" + assert (rc_file).exists() - if l["name"] == "xtensor": - assert l["channel"].startswith(f"{ca}/conda-forge/") - assert l["url"].startswith(f"{ca}/conda-forge/") - assert l["version"].startswith("0.22.") + rc_dict = None + with open(rc_file) as f: + rc_dict = yaml.load(f, Loader=yaml.FullLoader) + assert rc_dict + assert set(rc_dict.keys()) == {"platform"} + assert rc_dict["platform"] == "ptf-128" - def test_channel_nodefaults(self): - f_name = random_string() + ".yaml" - rc_file = os.path.join(TestCreate.spec_files_location, f_name) + res = helpers.info("-n", env_name, "--json") + assert "__archspec=1=128" in res["virtual packages"] + assert res["platform"] == "ptf-128" - content = [ - "channels:", - " - rc", - ] - with open(rc_file, "w") as f: - f.write("\n".join(content)) - - f_name = random_string() + ".yaml" - spec_file = os.path.join(TestCreate.spec_files_location, f_name) - contents = [ - "channels:", - " - yaml", - " - nodefaults", - "dependencies:", - " - xframe", - ] - with open(spec_file, "w") as f: - f.write("\n".join(contents)) + # test virtual packages + helpers.create("-n", env_name, "--platform", "win-32") + res = helpers.info("-n", env_name, "--json") + assert "__archspec=1=x86" in res["virtual packages"] + assert 
"__win=0=0" in res["virtual packages"] + assert res["platform"] == "win-32" - res = create( - "-n", - TestCreate.env_name, - "-f", - spec_file, - "--print-config-only", - f"--rc-file={rc_file}", - default_channel=False, - no_rc=False, - ) - assert res["channels"] == ["yaml"] - - def test_set_platform(self, existing_cache): - # test a dummy platform/arch - create("-n", TestCreate.env_name, "--platform", "ptf-128") - rc_file = Path(TestCreate.prefix) / ".mambarc" - assert (rc_file).exists() - - rc_dict = None - with open(rc_file) as f: - rc_dict = yaml.load(f, Loader=yaml.FullLoader) - assert rc_dict - assert set(rc_dict.keys()) == {"platform"} - assert rc_dict["platform"] == "ptf-128" - - res = info("-n", TestCreate.env_name, "--json") - assert "__archspec=1=128" in res["virtual packages"] - assert res["platform"] == "ptf-128" - - # test virtual packages - create("-n", TestCreate.env_name, "--platform", "win-32") - res = info("-n", TestCreate.env_name, "--json") - assert "__archspec=1=x86" in res["virtual packages"] - assert "__win=0=0" in res["virtual packages"] - assert res["platform"] == "win-32" - - @pytest.mark.skipif( - dry_run_tests is DryRun.ULTRA_DRY, reason="Running only ultra-dry tests" - ) - @pytest.mark.parametrize( - "version,build,cache_tag", - [ - ["2.7", "*", ""], - ["3.10", "*_cpython", "cpython-310"], - # FIXME: https://github.com/mamba-org/mamba/issues/1432 - # [ "3.7", "*_pypy","pypy37"], - ], - ) - def test_pyc_compilation(self, version, build, cache_tag): - prefix = Path(TestCreate.prefix) - cmd = ["-n", TestCreate.env_name, f"python={version}.*={build}", "six"] - - if platform.system() == "Windows": - site_packages = prefix / "Lib" / "site-packages" - if version == "2.7": - cmd += ["-c", "defaults"] # for vc=9.* - else: - site_packages = prefix / "lib" / f"python{version}" / "site-packages" - - if cache_tag: - pyc_fn = Path("__pycache__") / f"six.{cache_tag}.pyc" - else: - pyc_fn = Path(f"six.pyc") - - # Disable pyc compilation to ensure that files are still registered in conda-meta - create(*cmd, "--no-pyc") - assert not (site_packages / pyc_fn).exists() - six_meta = next((prefix / "conda-meta").glob("six-*.json")).read_text() - assert pyc_fn.name in six_meta - - # Enable pyc compilation to ensure that the pyc files are created - create(*cmd) - assert (site_packages / pyc_fn).exists() - assert pyc_fn.name in six_meta - - @pytest.mark.parametrize("env_file", test_envs) - def test_requires_pip_install(self, env_file): - prefix = Path(TestCreate.prefix) - cmd = ["-p", f"{prefix}", "-f", env_file] - create(*cmd) - - @pytest.mark.parametrize("env_file", test_envs) - def test_requires_pip_install_prefix_spaces(self, env_file): - prefix = Path(f"{TestCreate.prefix} with space") - cmd = ["-p", f"{prefix}", "-f", env_file] - create(*cmd) - - if platform.system() != "Windows": - pip = prefix / "bin" / "pip" - text = pip.read_text() - lines = text.splitlines() - assert lines[0] == "#!/bin/sh" - assert lines[1].startswith("'''exec'") - version = subprocess.check_output([pip, "--version"]) - assert len(version.decode()) > 0 - - @pytest.mark.parametrize("env_file", test_envs) - def test_requires_pip_install_no_parent_dir_specified(self, env_file): - prefix = Path(f"{TestCreate.prefix} with space") - initial_working_dir = os.getcwd() - try: - os.chdir( - source_dir_path - ) # Switch to the current source directory so that the file can be found without using an absolute path - env_file_name = Path(env_file).name - cmd = ["-p", f"{prefix}", "-f", env_file_name] - create(*cmd) - 
finally: - os.chdir(initial_working_dir) # Switch back to original working dir. - - def test_pre_commit_compat(self, tmp_path): - # We test compatibility with the downstream pre-commit package here because the pre-commit project does not currently accept any code changes related to Conda, see https://github.com/pre-commit/pre-commit/pull/2446#issuecomment-1353394177. - def create_repo(path: Path) -> str: - subprocess_run("git", "init", cwd=path) - subprocess_run("git", "config", "user.email", "test@test", cwd=path) - subprocess_run("git", "config", "user.name", "test", cwd=path) - subprocess_run("git", "add", ".", cwd=path) - subprocess_run("git", "commit", "-m", "Initialize repo", cwd=path) - return subprocess_run( - "git", "rev-parse", "HEAD", cwd=path, text=True - ).strip() - - hook_repo = tmp_path / "hook_repo" - caller_repo = tmp_path / "caller_repo" - - # Create hook_repo Git repo - shutil.copytree( - this_source_file_dir_path / "pre_commit_conda_hooks_repo", hook_repo - ) - commit_sha = create_repo(hook_repo) - - # Create Git repo to call "pre-commit" from - pre_commit_config = { - "repos": [ - { - "repo": str(hook_repo), - "rev": commit_sha, - "hooks": [ - {"id": "sys-exec"}, - { - "id": "additional-deps", - "additional_dependencies": ["psutil"], - }, - ], - } +@pytest.mark.skipif( + helpers.dry_run_tests is helpers.DryRun.ULTRA_DRY, + reason="Running only ultra-dry tests", +) +@pytest.mark.parametrize("shared_pkgs_dirs", [True], indirect=True) +@pytest.mark.parametrize( + "version,build,cache_tag", + [ + ["2.7", "*", ""], + ["3.10", "*_cpython", "cpython-310"], + # FIXME: https://github.com/mamba-org/mamba/issues/1432 + # [ "3.7", "*_pypy","pypy37"], + ], +) +def test_pyc_compilation(tmp_home, tmp_root_prefix, version, build, cache_tag): + env_name = "myenv" + env_prefix = tmp_root_prefix / "envs" / env_name + cmd = ["-n", env_name, f"python={version}.*={build}", "six"] + + if platform.system() == "Windows": + site_packages = env_prefix / "Lib" / "site-packages" + if version == "2.7": + cmd += ["-c", "defaults"] # for vc=9.* + else: + site_packages = env_prefix / "lib" / f"python{version}" / "site-packages" + + if cache_tag: + pyc_fn = Path("__pycache__") / f"six.{cache_tag}.pyc" + else: + pyc_fn = Path("six.pyc") + + # Disable pyc compilation to ensure that files are still registered in conda-meta + helpers.create(*cmd, "--no-pyc") + assert not (site_packages / pyc_fn).exists() + six_meta = next((env_prefix / "conda-meta").glob("six-*.json")).read_text() + assert pyc_fn.name in six_meta + + # Enable pyc compilation to ensure that the pyc files are created + helpers.create(*cmd) + assert (site_packages / pyc_fn).exists() + assert pyc_fn.name in six_meta + + +@pytest.mark.parametrize("shared_pkgs_dirs", [True], indirect=True) +@pytest.mark.parametrize("env_file", env_files) +def test_requires_pip_install(tmp_home, tmp_root_prefix, env_file): + cmd = ["-p", "myenv", "-f", env_file] + helpers.create(*cmd) + + +@pytest.mark.parametrize("shared_pkgs_dirs", [True], indirect=True) +@pytest.mark.parametrize("env_file", env_files) +def test_requires_pip_install_prefix_spaces( + tmp_home, tmp_root_prefix, tmp_path, env_file +): + env_prefix = tmp_path / "prefix with space" + cmd = ["-p", env_prefix, "-f", env_file] + helpers.create(*cmd) + + if platform.system() != "Windows": + pip = env_prefix / "bin" / "pip" + text = pip.read_text() + lines = text.splitlines() + assert lines[0] == "#!/bin/sh" + assert lines[1].startswith("'''exec'") + version = subprocess.check_output([pip, "--version"]) 
+ assert len(version.decode()) > 0 + + +@pytest.mark.parametrize("shared_pkgs_dirs", [True], indirect=True) +@pytest.mark.parametrize("env_file", env_files) +def test_requires_pip_install_no_parent_dir_specified( + tmp_home, tmp_root_prefix, tmp_path, env_file +): + initial_working_dir = os.getcwd() + try: + # Switch to the current source directory so that the file can be found without + # using an absolute path + os.chdir(__this_dir__) + env_file_name = Path(env_file).name + cmd = ["-p", tmp_path / "prefix with space", "-f", env_file_name] + helpers.create(*cmd) + finally: + os.chdir(initial_working_dir) # Switch back to original working dir. + + +@pytest.mark.parametrize("shared_pkgs_dirs", [True], indirect=True) +def test_pre_commit_compat(tmp_home, tmp_root_prefix, tmp_path): + # We test compatibility with the downstream pre-commit package here because the pre-commit project does not currently accept any code changes related to Conda, see https://github.com/pre-commit/pre-commit/pull/2446#issuecomment-1353394177. + def create_repo(path: Path) -> str: + helpers.subprocess_run("git", "init", cwd=path) + helpers.subprocess_run("git", "config", "user.email", "test@test", cwd=path) + helpers.subprocess_run("git", "config", "user.name", "test", cwd=path) + helpers.subprocess_run("git", "add", ".", cwd=path) + helpers.subprocess_run("git", "commit", "-m", "Initialize repo", cwd=path) + return helpers.subprocess_run( + "git", "rev-parse", "HEAD", cwd=path, text=True + ).strip() + + hook_repo = tmp_path / "hook_repo" + caller_repo = tmp_path / "caller_repo" + + # Create hook_repo Git repo + shutil.copytree(__this_dir__ / "pre_commit_conda_hooks_repo", hook_repo) + commit_sha = create_repo(hook_repo) + + # Create Git repo to call "pre-commit" from + pre_commit_config = { + "repos": [ + { + "repo": str(hook_repo), + "rev": commit_sha, + "hooks": [ + {"id": "sys-exec"}, + { + "id": "additional-deps", + "additional_dependencies": ["psutil"], + }, + ], + } + ] + } + caller_repo.mkdir() + pre_commit_config_file = caller_repo / ".pre-commit-config.yaml" + pre_commit_config_file.write_text(yaml.dump(pre_commit_config)) + (caller_repo / "something.py").write_text("import psutil; print(psutil)") + create_repo(caller_repo) + + env_prefix = tmp_path / "some-prefix" + helpers.create("-p", env_prefix, "pre-commit") + env_overrides = { + "PRE_COMMIT_USE_MICROMAMBA": "1", + "PATH": os.pathsep.join( + [ + str(Path(helpers.get_umamba()).parent), + *os.environ["PATH"].split(os.pathsep), ] - } - caller_repo.mkdir() - pre_commit_config_file = caller_repo / ".pre-commit-config.yaml" - pre_commit_config_file.write_text(yaml.dump(pre_commit_config)) - (caller_repo / "something.py").write_text("import psutil; print(psutil)") - create_repo(caller_repo) - - create("-p", TestCreate.prefix, "pre-commit") - env_overrides = { - "PRE_COMMIT_USE_MICROMAMBA": "1", - "PATH": os.pathsep.join( - [str(Path(get_umamba()).parent), *os.environ["PATH"].split(os.pathsep)] - ), - } - try: - output = umamba_run( - "-p", - TestCreate.prefix, - "--cwd", - caller_repo, - "pre-commit", - "run", - "-v", - "-a", - env={**os.environ, **env_overrides}, - ) - assert "conda-default" in output - assert "= 2 - assert str(self.root_prefix) in env_json["envs"] - assert env_1_fp in env_json["envs"] - - def test_env_list_table(self): - res = run_env("list") - - assert "Name" in res - assert "base" in res - assert str(self.root_prefix) in res - lines = res.splitlines() - for l in lines: - if "*" in l: - active_env_l = l - assert str(self.root_prefix) in 
active_env_l - - full_env = self.root_prefix / "envs" / self.env_name_1 - os.environ["CONDA_PREFIX"] = str(full_env) - - res = run_env("list") - - lines = res.splitlines() - for l in lines: - if "*" in l: - active_env_l = l - assert str(full_env) in active_env_l - - os.environ["CONDA_PREFIX"] = str(self.root_prefix) - - def test_register_new_env(self): - - res = create( - f"", - "-n", - self.env_name_2, - "--json", - no_dry_run=True, - ) - res = create( - f"", - "-n", - self.env_name_3, - "--json", - no_dry_run=True, - ) - - env_json = run_env("list", "--json") - env_2_fp = str(self.root_prefix / "envs" / self.env_name_2) - env_3_fp = str(self.root_prefix / "envs" / self.env_name_3) - assert str(env_2_fp) in env_json["envs"] - assert str(env_3_fp) in env_json["envs"] - - shutil.rmtree(env_2_fp) - env_json = run_env("list", "--json") - assert env_2_fp not in env_json["envs"] - assert env_3_fp in env_json["envs"] - - def test_env_export(self): - env_name = "env-create-export" - spec_file = Path(__file__).parent / "env-create-export.yaml" - create("", "-n", env_name, "-f", spec_file) - ret = yaml.safe_load(run_env("export", "-n", env_name)) - assert ret["name"] == env_name - assert set(ret["channels"]) == {"conda-forge"} - assert "micromamba=0.24.0=0" in ret["dependencies"] - - def test_create(self): - # Tests for 'micromamba env create' can be found in 'test_create.py' (look for 'create_cmd') - pass - - def test_env_remove(self): - env_name = "env-create-remove" - env_fp = str(self.root_prefix / "envs" / env_name) - conda_env_file = Path(os.path.join("~", ".conda/environments.txt")).expanduser() - - # Create env with xtensor - res = create("xtensor", "-n", env_name, "--json", no_dry_run=True) - - env_json = run_env("list", "--json") - assert env_fp in env_json["envs"] - assert Path(env_fp).expanduser().exists() - with open(conda_env_file, "r", encoding="utf-8") as f: - lines = [line.strip() for line in f] - assert env_fp in lines - - # Unregister / remove env_name - run_env("remove", "-n", env_name, "-y") - env_json = run_env("list", "--json") - assert env_fp not in env_json["envs"] - assert not Path(env_fp).expanduser().exists() - with open(conda_env_file, "r", encoding="utf-8") as f: - lines = [line.strip() for line in f] - assert env_fp not in lines +from . 
import helpers + +__this_dir__ = Path(__file__).parent.resolve() + + +def test_env_list(tmp_home, tmp_root_prefix, tmp_empty_env): + env_json = helpers.run_env("list", "--json") + + assert "envs" in env_json + assert len(env_json["envs"]) >= 2 + assert str(tmp_root_prefix) in env_json["envs"] + assert str(tmp_empty_env) in env_json["envs"] + + +def test_env_list_table(tmp_home, tmp_root_prefix, tmp_prefix): + res = helpers.run_env("list") + + assert "Name" in res + assert "base" in res + assert str(tmp_root_prefix) in res + all_lines = res.splitlines() + print("\n".join(all_lines)) + for line in all_lines: + if "*" in line: + active_env_l = line + assert str(tmp_root_prefix) in active_env_l + + os.environ["CONDA_PREFIX"] = str(tmp_prefix) + + res = helpers.run_env("list") + + all_lines = res.splitlines() + for line in all_lines: + if "*" in line: + active_env_l = line + assert str(tmp_prefix) in active_env_l + + +def test_register_new_env(tmp_home, tmp_root_prefix): + helpers.create("-n", "env2", "--json", no_dry_run=True) + helpers.create("-n", "env3", "--json", no_dry_run=True) + + env_json = helpers.run_env("list", "--json") + env_2_fp = tmp_root_prefix / "envs" / "env2" + env_3_fp = tmp_root_prefix / "envs" / "env3" + assert str(env_2_fp) in env_json["envs"] + assert str(env_3_fp) in env_json["envs"] + + shutil.rmtree(env_2_fp) + env_json = helpers.run_env("list", "--json") + assert str(env_2_fp) not in env_json["envs"] + assert str(env_3_fp) in env_json["envs"] + + +def test_env_export(tmp_home, tmp_root_prefix): + env_name = "env-create-export" + spec_file = __this_dir__ / "env-create-export.yaml" + helpers.create("-n", env_name, "-f", spec_file) + ret = yaml.safe_load(helpers.run_env("export", "-n", env_name)) + assert ret["name"] == env_name + assert set(ret["channels"]) == {"conda-forge"} + assert "micromamba=0.24.0=0" in ret["dependencies"] + + +def test_create(): + """Tests for ``micromamba env create`` can be found in ``test_create.py``. + + Look for 'create_cmd'. + """ + pass + + +@pytest.mark.parametrize("shared_pkgs_dirs", [True], indirect=True) +def test_env_remove(tmp_home, tmp_root_prefix): + env_name = "env-create-remove" + env_fp = tmp_root_prefix / "envs" / env_name + conda_env_file = tmp_home / ".conda/environments.txt" + + # Create env with xtensor + helpers.create("xtensor", "-n", env_name, "--json", no_dry_run=True) + + env_json = helpers.run_env("list", "--json") + assert str(env_fp) in env_json["envs"] + assert env_fp.exists() + with open(conda_env_file, "r", encoding="utf-8") as f: + lines = [line.strip() for line in f] + assert str(env_fp) in lines + + # Unregister / remove env_name + helpers.run_env("remove", "-n", env_name, "-y") + env_json = helpers.run_env("list", "--json") + assert str(env_fp) not in env_json["envs"] + assert not env_fp.exists() + with open(conda_env_file, "r", encoding="utf-8") as f: + lines = [line.strip() for line in f] + assert str(env_fp) not in lines diff --git a/micromamba/tests/test_proxy.py b/micromamba/tests/test_proxy.py index 609289b90d..d2f09df28a 100644 --- a/micromamba/tests/test_proxy.py +++ b/micromamba/tests/test_proxy.py @@ -1,137 +1,117 @@ -import asyncio import os import shutil +import subprocess import time import urllib.parse from pathlib import Path -from subprocess import TimeoutExpired -from .helpers import * +import pytest +from . 
import helpers
 
-class TestProxy:
+__this_dir__ = Path(__file__).parent.resolve()
 
-    current_root_prefix = os.environ["MAMBA_ROOT_PREFIX"]
-    current_prefix = os.environ["CONDA_PREFIX"]
-    env_name = random_string()
-    root_prefix = os.path.expanduser(os.path.join("~", "tmproot" + random_string()))
-    prefix = os.path.join(root_prefix, "envs", env_name)
 
+@pytest.fixture
+def mitmdump_exe():
+    """Get the path to the ``mitmdump`` executable.
 
-    mitm_exe = shutil.which("mitmdump")
-    mitm_confdir = os.path.join(root_prefix, "mitmproxy")
-    mitm_dump_path = os.path.join(root_prefix, "dump.json")
+    If the executable is provided in a conda environment, this fixture needs to be called
+    before ``tmp_root_prefix`` and the like, as they will clean the ``PATH``.
+    """
+    return Path(shutil.which("mitmdump")).resolve()
 
-    proxy_process = None
 
-    @classmethod
-    def setup_class(cls):
-        os.environ["MAMBA_ROOT_PREFIX"] = TestProxy.root_prefix
-        os.environ["CONDA_PREFIX"] = TestProxy.prefix
-
-    def setup_method(self):
-        create("-n", TestProxy.env_name, "--offline", no_dry_run=True)
-
-    @classmethod
-    def teardown_class(cls):
-        os.environ["MAMBA_ROOT_PREFIX"] = TestProxy.current_root_prefix
-        os.environ["CONDA_PREFIX"] = TestProxy.current_prefix
-
-    def teardown_method(self):
-        shutil.rmtree(TestProxy.root_prefix)
+class MitmProxy:
+    def __init__(self, exe: Path, conf: Path, dump: Path):
+        self.exe = Path(exe).resolve()
+        self.conf = Path(conf).resolve()
+        self.dump = Path(dump).resolve()
+        self.process = None
 
     def start_proxy(self, port, options=[]):
-        assert self.proxy_process is None
-        script = Path(__file__).parent / "dump_proxy_connections.py"
-        self.proxy_process = subprocess.Popen(
+        assert self.process is None
+        self.process = subprocess.Popen(
             [
-                TestProxy.mitm_exe,
+                self.exe,
                 "--listen-port",
                 str(port),
                 "--scripts",
-                script,
+                str(__this_dir__ / "dump_proxy_connections.py"),
                 "--set",
-                f"outfile={TestProxy.mitm_dump_path}",
+                f"outfile={self.dump}",
                 "--set",
-                f"confdir={TestProxy.mitm_confdir}",
+                f"confdir={self.conf}",
                 *options,
             ]
         )
         # Wait until mitmproxy has generated its certificate or some tests might fail
-        while not (Path(TestProxy.mitm_confdir) / "mitmproxy-ca-cert.pem").exists():
+        while not (Path(self.conf) / "mitmproxy-ca-cert.pem").exists():
            time.sleep(1)
 
     def stop_proxy(self):
-        self.proxy_process.terminate()
+        self.process.terminate()
         try:
-            self.proxy_process.wait(3)
-        except TimeoutExpired:
-            self.proxy_process.kill()
-        self.proxy_process = None
-
-    @pytest.mark.parametrize(
-        "auth",
-        [
-            None,
-            "foo:bar",
-            "user%40example.com:pass",
-        ],
+            self.process.wait(3)
+        except subprocess.TimeoutExpired:
+            self.process.kill()
+        self.process = None
+
+
+@pytest.mark.parametrize("auth", [None, "foo:bar", "user%40example.com:pass"])
+@pytest.mark.parametrize("ssl_verify", (True, False))
+def test_proxy_install(
+    mitmdump_exe, tmp_home, tmp_prefix, tmp_path, unused_tcp_port, auth, ssl_verify
+):
+    """
+    This test makes sure micromamba follows the proxy settings in ``.condarc``.
+
+    It starts mitmproxy with the `dump_proxy_connections.py` script, which dumps all requested URLs in a text file.
+    After that, micromamba is used to install a package while pointing it at that mitmproxy instance. Once
+    micromamba has finished, the proxy server is stopped and the URLs micromamba requested are compared to the URLs
+    mitmproxy intercepted, making sure that all the requests went through the proxy.
+ """ + + if auth is not None: + proxy_options = ["--proxyauth", urllib.parse.unquote(auth)] + proxy_url = "http://{}@localhost:{}".format(auth, unused_tcp_port) + else: + proxy_options = [] + proxy_url = "http://localhost:{}".format(unused_tcp_port) + + proxy = MitmProxy( + exe=mitmdump_exe, + conf=(tmp_path / "mitmproxy-conf"), + dump=(tmp_path / "mitmproxy-dump"), ) - @pytest.mark.parametrize("ssl_verify", (True, False)) - def test_install(self, unused_tcp_port, auth, ssl_verify): - """ - This test makes sure micromamba follows the proxy settings in .condarc - - It starts mitmproxy with the `dump_proxy_connections.py` script, which dumps all requested urls in a text file. - After that micromamba is used to install a package, while pointing it to that mitmproxy instance. Once - micromamba finished the proxy server is stopped and the urls micromamba requested are compared to the urls - mitmproxy intercepted, making sure that all the requests went through the proxy. - """ - - if auth is not None: - proxy_options = ["--proxyauth", urllib.parse.unquote(auth)] - proxy_url = "http://{}@localhost:{}".format(auth, unused_tcp_port) - else: - proxy_options = [] - proxy_url = "http://localhost:{}".format(unused_tcp_port) - - self.start_proxy(unused_tcp_port, proxy_options) - - cmd = ["xtensor"] - f_name = random_string() + ".yaml" - rc_file = os.path.join(TestProxy.prefix, f_name) - - if ssl_verify: - verify_string = os.path.abspath( - os.path.join(TestProxy.mitm_confdir, "mitmproxy-ca-cert.pem") - ) - else: - verify_string = "false" - - file_content = [ - "proxy_servers:", - " http: {}".format(proxy_url), - " https: {}".format(proxy_url), - "ssl_verify: {}".format(verify_string), - ] - with open(rc_file, "w") as f: - f.write("\n".join(file_content)) - - cmd += ["--rc-file", rc_file] - - if os.name == "nt": - # The certificates generated by mitmproxy don't support revocation. - # The schannel backend curl uses on Windows fails revocation check if revocation isn't supported. Other - # backends succeed revocation check in that case. - cmd += ["--ssl-no-revoke"] - - res = install(*cmd, "--json", no_rc=False) - - self.stop_proxy() - - with open(TestProxy.mitm_dump_path, "r") as f: - proxied_requests = f.read().splitlines() - - for fetch in res["actions"]["FETCH"]: - assert fetch["url"] in proxied_requests + proxy.start_proxy(unused_tcp_port, proxy_options) + + rc_file = tmp_prefix / "rc.yaml" + verify_string = proxy.conf / "mitmproxy-ca-cert.pem" if ssl_verify else "false" + + file_content = [ + "proxy_servers:", + " http: {}".format(proxy_url), + " https: {}".format(proxy_url), + "ssl_verify: {}".format(verify_string), + ] + with open(rc_file, "w") as f: + f.write("\n".join(file_content)) + + cmd = ["xtensor", "--rc-file", rc_file] + if os.name == "nt": + # The certificates generated by mitmproxy don't support revocation. + # The schannel backend curl uses on Windows fails revocation check if revocation isn't supported. Other + # backends succeed revocation check in that case. 
+ cmd += ["--ssl-no-revoke"] + + res = helpers.install(*cmd, "--json", no_rc=False) + + proxy.stop_proxy() + + with open(proxy.dump, "r") as f: + proxied_requests = f.read().splitlines() + + for fetch in res["actions"]["FETCH"]: + assert fetch["url"] in proxied_requests diff --git a/micromamba/tests/test_remove.py b/micromamba/tests/test_remove.py index 5122e2c5d9..61d81c569b 100644 --- a/micromamba/tests/test_remove.py +++ b/micromamba/tests/test_remove.py @@ -12,6 +12,8 @@ from .helpers import * +__this_dir__ = Path(__file__).parent.resolve() + @pytest.mark.skipif(dry_run_tests == DryRun.ULTRA_DRY, reason="Running ultra dry tests") class TestRemove: @@ -185,7 +187,6 @@ def test_remove_in_use(self, env_created): class TestRemoveConfig: - current_root_prefix = os.environ["MAMBA_ROOT_PREFIX"] current_prefix = os.environ["CONDA_PREFIX"] @@ -243,12 +244,9 @@ def test_specs(self, env_created): assert res["specs"] == specs def test_remove_then_clean(self, env_created): - from .test_create import test_env_requires_pip_install_path - + env_file = __this_dir__ / "env-requires-pip-install.yaml" env_name = "env_to_clean" - create( - "-n", env_name, "-f", test_env_requires_pip_install_path, no_dry_run=True - ) + create("-n", env_name, "-f", env_file, no_dry_run=True) remove("-n", env_name, "pip", no_dry_run=True) clean("-ay", no_dry_run=True) diff --git a/pyproject.toml b/pyproject.toml index 09ac72363a..b931a56a3d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,3 +4,7 @@ requires = [ 'setuptools >= 49.2.1', 'pybind11 >= 2.2', ] + +[tool.pytest.ini_options] +minversion = "6.0" +tmp_path_retention_policy = "failed"
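For reviewers who want to exercise the new proxy harness outside of pytest, here is a minimal sketch (not part of the patch) of how the MitmProxy helper added in micromamba/tests/test_proxy.py is driven. It assumes mitmdump is on PATH and that dump_proxy_connections.py sits next to test_proxy.py, as the helper expects; the import path and the port number are illustrative assumptions only.

    # Illustrative sketch only -- not part of this patch.
    import pathlib
    import shutil
    import tempfile

    from micromamba.tests.test_proxy import MitmProxy  # hypothetical import path

    workdir = pathlib.Path(tempfile.mkdtemp())
    proxy = MitmProxy(
        exe=pathlib.Path(shutil.which("mitmdump")),  # mitmdump must be on PATH
        conf=workdir / "mitmproxy-conf",             # confdir where the CA certificate is generated
        dump=workdir / "mitmproxy-dump",             # file the dump script writes requested URLs to
    )

    proxy.start_proxy(8080)  # returns once mitmproxy-ca-cert.pem exists under conf
    try:
        # Point micromamba's proxy_servers at http://localhost:8080 (via an rc file,
        # as test_proxy_install does) and run an install here.
        pass
    finally:
        proxy.stop_proxy()  # terminate, wait up to 3 seconds, then kill if still running

    # Afterwards proxy.dump contains one requested URL per line, which the test compares
    # against the "FETCH" actions reported by the JSON output of the install.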