From 1931b5fb4594790d6cd3e32f0d536cd1e9bd730b Mon Sep 17 00:00:00 2001
From: David Shrewsbury
Date: Tue, 24 Sep 2024 12:07:59 -0400
Subject: [PATCH] Convert BaseConfig/RunnerConfig to dataclass

---
 src/ansible_runner/config/_base.py  | 143 +++++++++++++---------------
 src/ansible_runner/config/runner.py | 127 +++++++++++++-----------
 src/ansible_runner/runner.py        |   2 +-
 test/integration/test_runner.py     |   2 +-
 test/unit/config/test__base.py      |   8 +-
 test/unit/config/test_runner.py     |  34 +++----
 6 files changed, 162 insertions(+), 154 deletions(-)

diff --git a/src/ansible_runner/config/_base.py b/src/ansible_runner/config/_base.py
index 819640df..038644f1 100644
--- a/src/ansible_runner/config/_base.py
+++ b/src/ansible_runner/config/_base.py
@@ -29,6 +29,7 @@
 import tempfile
 import shutil
 from base64 import b64encode
+from dataclasses import dataclass, field
 from enum import Enum
 from uuid import uuid4
 from collections.abc import Mapping
@@ -61,69 +62,58 @@ class BaseExecutionMode(Enum):
     GENERIC_COMMANDS = 2
 
 
+@dataclass
+class _ArgField(dict):
+    required: bool = True
+
+    def __getattr__(self, attr):
+        return self[attr]
+
+
+@dataclass
 class BaseConfig:
 
-    def __init__(self,
-                 private_data_dir: str | None = None,
-                 host_cwd: str | None = None,
-                 envvars: dict[str, Any] | None = None,
-                 passwords=None,
-                 settings=None,
-                 project_dir: str | None = None,
-                 artifact_dir: str | None = None,
-                 fact_cache_type: str = 'jsonfile',
-                 fact_cache=None,
-                 process_isolation: bool = False,
-                 process_isolation_executable: str | None = None,
-                 container_image: str = "",
-                 container_volume_mounts=None,
-                 container_options=None,
-                 container_workdir: str | None = None,
-                 container_auth_data=None,
-                 ident: str | None = None,
-                 rotate_artifacts: int = 0,
-                 timeout: int | None = None,
-                 ssh_key: str | None = None,
-                 quiet: bool = False,
-                 json_mode: bool = False,
-                 check_job_event_data: bool = False,
-                 suppress_env_files: bool = False,
-                 keepalive_seconds: int | None = None
-                 ):
+    private_data_dir: str | None = field(metadata=_ArgField(), default=None)
+    host_cwd: str | None = field(metadata=_ArgField(), default=None)
+    envvars: dict[str, Any] | None = field(metadata=_ArgField(), default=None)
+    passwords: dict[str, str] | None = field(metadata=_ArgField(), default=None)
+    settings: dict | None = field(metadata=_ArgField(), default=None)
+    project_dir: str | None = field(metadata=_ArgField(), default=None)
+    artifact_dir: str | None = field(metadata=_ArgField(), default=None)
+    fact_cache_type: str = field(metadata=_ArgField(), default='jsonfile')
+    fact_cache: str | None = field(metadata=_ArgField(), default=None)
+    process_isolation: bool = field(metadata=_ArgField(), default=False)
+    process_isolation_executable: str = field(metadata=_ArgField(), default=defaults.default_process_isolation_executable)
+    container_image: str = field(metadata=_ArgField(), default="")
+    container_volume_mounts: list[str] | None = field(metadata=_ArgField(), default=None)
+    container_options: list[str] | None = field(metadata=_ArgField(), default=None)
+    container_workdir: str | None = field(metadata=_ArgField(), default=None)
+    container_auth_data: dict[str, str] | None = field(metadata=_ArgField(), default=None)
+    ident: str | None = field(metadata=_ArgField(), default=None)
+    rotate_artifacts: int = field(metadata=_ArgField(), default=0)
+    timeout: int | None = field(metadata=_ArgField(), default=None)
+    ssh_key: str | None = field(metadata=_ArgField(), default=None)
+    quiet: bool = field(metadata=_ArgField(), default=False)
+    json_mode: bool = field(metadata=_ArgField(), default=False)
+    check_job_event_data: bool = field(metadata=_ArgField(), default=False)
+    suppress_env_files: bool = field(metadata=_ArgField(), default=False)
+    keepalive_seconds: int | None = field(metadata=_ArgField(), default=None)
+
+    _CONTAINER_ENGINES = ('docker', 'podman')
+
+    def __post_init__(self) -> None:
         # pylint: disable=W0613
-        # common params
-        self.host_cwd = host_cwd
-        self.envvars = envvars
-        self.ssh_key_data = ssh_key
         self.command: list[str] = []
-
-        # container params
-        self.process_isolation = process_isolation
-        self.process_isolation_executable = process_isolation_executable or defaults.default_process_isolation_executable
-        self.container_image = container_image
-        self.container_volume_mounts = container_volume_mounts
-        self.container_workdir = container_workdir
-        self.container_auth_data = container_auth_data
         self.registry_auth_path: str
         self.container_name: str = ""  # like other properties, not accurate until prepare is called
-        self.container_options = container_options
-
-        # runner params
-        self.rotate_artifacts = rotate_artifacts
-        self.quiet = quiet
-        self.json_mode = json_mode
-        self.passwords = passwords
-        self.settings = settings
-        self.timeout = timeout
-        self.check_job_event_data = check_job_event_data
-        self.suppress_env_files = suppress_env_files
+
         # ignore this for now since it's worker-specific and would just trip up old runners
         # self.keepalive_seconds = keepalive_seconds
 
         # setup initial environment
-        if private_data_dir:
-            self.private_data_dir = os.path.abspath(private_data_dir)
+        if self.private_data_dir:
+            self.private_data_dir = os.path.abspath(self.private_data_dir)
             # Note that os.makedirs, exist_ok=True is dangerous.  If there's a directory writable
             # by someone other than the user anywhere in the path to be created, an attacker can
             # attempt to compromise the directories via a race.
@@ -131,26 +121,22 @@ def __init__(self,
         else:
             self.private_data_dir = tempfile.mkdtemp(prefix=defaults.AUTO_CREATE_NAMING, dir=defaults.AUTO_CREATE_DIR)
 
-        if artifact_dir is None:
-            artifact_dir = os.path.join(self.private_data_dir, 'artifacts')
+        if self.artifact_dir is None:
+            self.artifact_dir = os.path.join(self.private_data_dir, 'artifacts')
         else:
-            artifact_dir = os.path.abspath(artifact_dir)
+            self.artifact_dir = os.path.abspath(self.artifact_dir)
 
-        if ident is None:
+        if self.ident is None:
             self.ident = str(uuid4())
         else:
-            self.ident = str(ident)
+            self.ident = str(self.ident)
 
-        self.artifact_dir = os.path.join(artifact_dir, self.ident)
+        self.artifact_dir = os.path.join(self.artifact_dir, self.ident)
 
-        if not project_dir:
+        if not self.project_dir:
             self.project_dir = os.path.join(self.private_data_dir, 'project')
-        else:
-            self.project_dir = project_dir
 
-        self.rotate_artifacts = rotate_artifacts
-        self.fact_cache_type = fact_cache_type
-        self.fact_cache = os.path.join(self.artifact_dir, fact_cache or 'fact_cache') if self.fact_cache_type == 'jsonfile' else None
+        self.fact_cache = os.path.join(self.artifact_dir, self.fact_cache or 'fact_cache') if self.fact_cache_type == 'jsonfile' else None
 
         self.loader = ArtifactLoader(self.private_data_dir)
@@ -162,12 +148,19 @@ def __init__(self,
         os.makedirs(self.artifact_dir, exist_ok=True, mode=0o700)
 
-    _CONTAINER_ENGINES = ('docker', 'podman')
-
     @property
     def containerized(self):
         return self.process_isolation and self.process_isolation_executable in self._CONTAINER_ENGINES
 
+    @property
+    def ssh_key_data(self):
+        """ Alias for backward compatibility. """
+        return self.ssh_key
+
+    @ssh_key_data.setter
+    def ssh_key_data(self, value):
+        self.ssh_key = value
+
     def prepare_env(self, runner_mode: str = 'pexpect') -> None:
         """
         Manages reading environment metadata files under ``private_data_dir`` and merging/updating
@@ -178,7 +171,7 @@ def prepare_env(self, runner_mode: str = 'pexpect') -> None:
             if self.settings and isinstance(self.settings, dict):
                 self.settings.update(self.loader.load_file('env/settings', Mapping))  # type: ignore
             else:
-                self.settings = self.loader.load_file('env/settings', Mapping)
+                self.settings = self.loader.load_file('env/settings', Mapping)  # type: ignore
         except ConfigurationError:
             debug("Not loading settings")
             self.settings = {}
@@ -188,11 +181,11 @@ def prepare_env(self, runner_mode: str = 'pexpect') -> None:
                 if self.passwords and isinstance(self.passwords, dict):
                     self.passwords.update(self.loader.load_file('env/passwords', Mapping))  # type: ignore
                 else:
-                    self.passwords = self.passwords or self.loader.load_file('env/passwords', Mapping)
+                    self.passwords = self.passwords or self.loader.load_file('env/passwords', Mapping)  # type: ignore
             except ConfigurationError:
                 debug('Not loading passwords')
 
-            self.expect_passwords = {}
+            self.expect_passwords: dict[Any, Any] = {}
             try:
                 if self.passwords:
                     self.expect_passwords = {
@@ -268,16 +261,16 @@ def prepare_env(self, runner_mode: str = 'pexpect') -> None:
             # Still need to pass default environment to pexpect
 
             try:
-                if self.ssh_key_data is None:
-                    self.ssh_key_data = self.loader.load_file('env/ssh_key', str)  # type: ignore
+                if self.ssh_key is None:
+                    self.ssh_key = self.loader.load_file('env/ssh_key', str)  # type: ignore
             except ConfigurationError:
                 debug("Not loading ssh key")
-                self.ssh_key_data = None
+                self.ssh_key = None
 
             # write the SSH key data into a fifo read by ssh-agent
-            if self.ssh_key_data:
+            if self.ssh_key:
                 self.ssh_key_path = os.path.join(self.artifact_dir, 'ssh_key_data')
-                open_fifo_write(self.ssh_key_path, self.ssh_key_data)
+                open_fifo_write(self.ssh_key_path, self.ssh_key)
 
             self.suppress_output_file = self.settings.get('suppress_output_file', False)
             self.suppress_ansible_output = self.settings.get('suppress_ansible_output', self.quiet)
@@ -340,7 +333,7 @@ def prepare_env(self, runner_mode: str = 'pexpect') -> None:
                 debug(f' {k}: {v}')
 
     def handle_command_wrap(self, execution_mode: BaseExecutionMode, cmdline_args: list[str]) -> None:
-        if self.ssh_key_data:
+        if self.ssh_key:
             logger.debug('ssh key data added')
             self.command = self.wrap_args_with_ssh_agent(self.command, self.ssh_key_path)
 
diff --git a/src/ansible_runner/config/runner.py b/src/ansible_runner/config/runner.py
index 0633a23f..85d5677d 100644
--- a/src/ansible_runner/config/runner.py
+++ b/src/ansible_runner/config/runner.py
@@ -16,6 +16,7 @@
 # specific language governing permissions and limitations
 # under the License.
 #
+from __future__ import annotations
 
 # pylint: disable=W0201
 
@@ -27,8 +28,10 @@
 import tempfile
 import shutil
 
+from dataclasses import dataclass, field
+
 from ansible_runner import output
-from ansible_runner.config._base import BaseConfig, BaseExecutionMode
+from ansible_runner.config._base import _ArgField, BaseConfig, BaseExecutionMode
 from ansible_runner.exceptions import ConfigurationError
 from ansible_runner.output import debug
 from ansible_runner.utils import register_for_cleanup
@@ -44,6 +47,7 @@ class ExecutionMode():
     RAW = 3
 
 
+@dataclass
 class RunnerConfig(BaseConfig):
     """
     A ``Runner`` configuration object that's meant to encapsulate the configuration used by the
@@ -62,50 +66,61 @@ class RunnerConfig(BaseConfig):
     """
 
-    def __init__(self,
-                 private_data_dir, playbook=None, inventory=None, roles_path=None, limit=None,
-                 module=None, module_args=None, verbosity=None, host_pattern=None, binary=None,
-                 extravars=None, suppress_output_file=False, suppress_ansible_output=False, process_isolation_path=None,
-                 process_isolation_hide_paths=None, process_isolation_show_paths=None,
-                 process_isolation_ro_paths=None, tags=None, skip_tags=None,
-                 directory_isolation_base_path=None, forks=None, cmdline=None, omit_event_data=False,
-                 only_failed_event_data=False, **kwargs):
-
+    # 'binary' comes from the --binary CLI opt for an alternative ansible command path
+    binary: str | None = field(metadata=_ArgField(), default=None)
+    cmdline: str | None = field(metadata=_ArgField(), default=None)
+    directory_isolation_base_path: str | None = field(metadata=_ArgField(), default=None)
+    extravars: dict | None = field(metadata=_ArgField(), default=None)
+    forks: int | None = field(metadata=_ArgField(), default=None)
+    host_pattern: str | None = field(metadata=_ArgField(), default=None)
+    inventory: str | dict | list | None = field(metadata=_ArgField(), default=None)
+    limit: str | None = field(metadata=_ArgField(), default=None)
+    module: str | None = field(metadata=_ArgField(), default=None)
+    module_args: str | None = field(metadata=_ArgField(), default=None)
+    omit_event_data: bool = field(metadata=_ArgField(), default=False)
+    only_failed_event_data: bool = field(metadata=_ArgField(), default=False)
+    playbook: str | None = field(metadata=_ArgField(), default=None)
+    process_isolation_hide_paths: str | list | None = field(metadata=_ArgField(), default=None)
+    process_isolation_ro_paths: str | list | None = field(metadata=_ArgField(), default=None)
+    process_isolation_show_paths: str | list | None = field(metadata=_ArgField(), default=None)
+    process_isolation_path: str | None = field(metadata=_ArgField(), default=None)
+    roles_path: str | None = field(metadata=_ArgField(), default=None)
+    skip_tags: str | None = field(metadata=_ArgField(), default=None)
+    suppress_ansible_output: bool = field(metadata=_ArgField(), default=False)
+    suppress_output_file: bool = field(metadata=_ArgField(), default=False)
+    tags: str | None = field(metadata=_ArgField(), default=None)
+    verbosity: int | None = field(metadata=_ArgField(), default=None)
+
+    def __post_init__(self) -> None:
+        # NOTE: Cannot call base class __init__() here as that causes some recursion madness.
+        # We can call its __post_init__().
+        super().__post_init__()  # TODO: Should we rename this in base class?
         self.runner_mode = "pexpect"
-
-        super().__init__(private_data_dir, **kwargs)
-
-        self.playbook = playbook
-        self.inventory = inventory
-        self.roles_path = roles_path
-        self.limit = limit
-        self.module = module
-        self.module_args = module_args
-        self.host_pattern = host_pattern
-        self.binary = binary
-        self.extra_vars = extravars
-        self.process_isolation_path = process_isolation_path
         self.process_isolation_path_actual = None
-        self.process_isolation_hide_paths = process_isolation_hide_paths
-        self.process_isolation_show_paths = process_isolation_show_paths
-        self.process_isolation_ro_paths = process_isolation_ro_paths
-        self.directory_isolation_path = directory_isolation_base_path
-        self.verbosity = verbosity
-        self.suppress_output_file = suppress_output_file
-        self.suppress_ansible_output = suppress_ansible_output
-        self.tags = tags
-        self.skip_tags = skip_tags
         self.execution_mode = ExecutionMode.NONE
-        self.forks = forks
-        self.cmdline_args = cmdline
-
-        self.omit_event_data = omit_event_data
-        self.only_failed_event_data = only_failed_event_data
 
     @property
     def sandboxed(self):
         return self.process_isolation and self.process_isolation_executable not in self._CONTAINER_ENGINES
 
+    @property
+    def directory_isolation_path(self):
+        """ Alias for backward compatibility. """
+        return self.directory_isolation_base_path
+
+    @directory_isolation_path.setter
+    def directory_isolation_path(self, value):
+        self.directory_isolation_base_path = value
+
+    @property
+    def extra_vars(self):
+        """ Alias for backward compatibility. """
+        return self.extravars
+
+    @extra_vars.setter
+    def extra_vars(self, value):
+        self.extravars = value
+
     def prepare(self):
         """
         Performs basic checks and then properly invokes
@@ -131,11 +146,11 @@ def prepare(self):
         # we must call prepare_env() before we can reference it.
         self.prepare_env()
 
-        if self.sandboxed and self.directory_isolation_path is not None:
-            self.directory_isolation_path = tempfile.mkdtemp(prefix='runner_di_', dir=self.directory_isolation_path)
+        if self.sandboxed and self.directory_isolation_base_path is not None:
+            self.directory_isolation_base_path = tempfile.mkdtemp(prefix='runner_di_', dir=self.directory_isolation_base_path)
             if os.path.exists(self.project_dir):
-                output.debug(f"Copying directory tree from {self.project_dir} to {self.directory_isolation_path} for working directory isolation")
-                shutil.copytree(self.project_dir, self.directory_isolation_path, dirs_exist_ok=True, symlinks=True)
+                output.debug(f"Copying directory tree from {self.project_dir} to {self.directory_isolation_base_path} for working directory isolation")
+                shutil.copytree(self.project_dir, self.directory_isolation_base_path, dirs_exist_ok=True, symlinks=True)
 
         self.prepare_inventory()
         self.prepare_command()
@@ -186,14 +201,14 @@ def prepare_env(self):
         self.process_isolation_hide_paths = self.settings.get('process_isolation_hide_paths', self.process_isolation_hide_paths)
         self.process_isolation_show_paths = self.settings.get('process_isolation_show_paths', self.process_isolation_show_paths)
         self.process_isolation_ro_paths = self.settings.get('process_isolation_ro_paths', self.process_isolation_ro_paths)
-        self.directory_isolation_path = self.settings.get('directory_isolation_base_path', self.directory_isolation_path)
+        self.directory_isolation_base_path = self.settings.get('directory_isolation_base_path', self.directory_isolation_base_path)
         self.directory_isolation_cleanup = bool(self.settings.get('directory_isolation_cleanup', True))
 
         if 'AD_HOC_COMMAND_ID' in self.env or not os.path.exists(self.project_dir):
             self.cwd = self.private_data_dir
         else:
-            if self.directory_isolation_path is not None:
-                self.cwd = self.directory_isolation_path
+            if self.directory_isolation_base_path is not None:
+                self.cwd = self.directory_isolation_base_path
             else:
                 self.cwd = self.project_dir
@@ -240,8 +255,8 @@ def generate_ansible_command(self):
         exec_list = [base_command]
 
         try:
-            if self.cmdline_args:
-                cmdline_args = self.cmdline_args
+            if self.cmdline:
+                cmdline_args = self.cmdline
             else:
                 cmdline_args = self.loader.load_file('env/cmdline', str, encoding=None)
 
@@ -271,11 +286,11 @@ def generate_ansible_command(self):
             extravars_path = self.loader.abspath('env/extravars')
             exec_list.extend(['-e', f'@{extravars_path}'])
 
-        if self.extra_vars:
-            if isinstance(self.extra_vars, dict) and self.extra_vars:
+        if self.extravars:
+            if isinstance(self.extravars, dict) and self.extravars:
                 extra_vars_list = []
-                for k in self.extra_vars:
-                    extra_vars_list.append(f"\"{k}\":{json.dumps(self.extra_vars[k])}")
+                for k in self.extravars:
+                    extra_vars_list.append(f"\"{k}\":{json.dumps(self.extravars[k])}")
 
                 exec_list.extend(
                     [
@@ -283,8 +298,8 @@ def generate_ansible_command(self):
                         f'{{{",".join(extra_vars_list)}}}'
                     ]
                 )
-            elif self.loader.isfile(self.extra_vars):
-                exec_list.extend(['-e', f'@{self.loader.abspath(self.extra_vars)}'])
+            elif self.loader.isfile(self.extravars):
+                exec_list.extend(['-e', f'@{self.loader.abspath(self.extravars)}'])
 
         if self.verbosity:
             v = 'v' * self.verbosity
@@ -385,8 +400,8 @@ def wrap_args_for_sandbox(self, args):
 
         if self.execution_mode == ExecutionMode.ANSIBLE_PLAYBOOK:
             # playbook runs should cwd to the SCM checkout dir
-            if self.directory_isolation_path is not None:
-                new_args.extend(['--chdir', os.path.realpath(self.directory_isolation_path)])
+            if self.directory_isolation_base_path is not None:
+                new_args.extend(['--chdir', os.path.realpath(self.directory_isolation_base_path)])
             else:
                 new_args.extend(['--chdir', os.path.realpath(self.project_dir)])
         elif self.execution_mode == ExecutionMode.ANSIBLE:
@@ -398,7 +413,7 @@ def wrap_args_for_sandbox(self, args):
 
     def handle_command_wrap(self):
         # wrap args for ssh-agent
-        if self.ssh_key_data:
+        if self.ssh_key:
             debug('ssh-agent agrs added')
             self.command = self.wrap_args_with_ssh_agent(self.command, self.ssh_key_path)
 
@@ -413,6 +428,6 @@ def handle_command_wrap(self):
             # container volume mount is handled explicitly for run API's
             # using 'container_volume_mounts' arguments
             base_execution_mode = BaseExecutionMode.NONE
-            self.command = self.wrap_args_for_containerization(self.command, base_execution_mode, self.cmdline_args)
+            self.command = self.wrap_args_for_containerization(self.command, base_execution_mode, self.cmdline)
         else:
             debug('containerization disabled')
diff --git a/src/ansible_runner/runner.py b/src/ansible_runner/runner.py
index 06220547..06e4ad76 100644
--- a/src/ansible_runner/runner.py
+++ b/src/ansible_runner/runner.py
@@ -45,7 +45,7 @@ def __init__(self, config, cancel_callback=None, remove_partials=True, event_han
         # default runner mode to pexpect
         self.runner_mode = self.config.runner_mode if hasattr(self.config, 'runner_mode') else 'pexpect'
 
-        self.directory_isolation_path = self.config.directory_isolation_path if hasattr(self.config, 'directory_isolation_path') else None
+        self.directory_isolation_path = self.config.directory_isolation_base_path if hasattr(self.config, 'directory_isolation_path') else None
         self.directory_isolation_cleanup = self.config.directory_isolation_cleanup if hasattr(self.config, 'directory_isolation_cleanup') else None
         self.process_isolation = self.config.process_isolation if hasattr(self.config, 'process_isolation') else None
         self.process_isolation_path_actual = self.config.process_isolation_path_actual if hasattr(self.config, 'process_isolation_path_actual') else None
diff --git a/test/integration/test_runner.py b/test/integration/test_runner.py
index e00112b0..fb37bac5 100644
--- a/test/integration/test_runner.py
+++ b/test/integration/test_runner.py
@@ -271,7 +271,7 @@ def test_set_extra_vars(rc):
     rc.module = "debug"
     rc.module_args = "var=test_extra_vars"
     rc.host_pattern = "localhost"
-    rc.extra_vars = {'test_extra_vars': 'hello there'}
+    rc.extravars = {'test_extra_vars': 'hello there'}
     rc.prepare()
     runner = Runner(config=rc)
     runner.run()
diff --git a/test/unit/config/test__base.py b/test/unit/config/test__base.py
index 35df30e7..9c33e423 100644
--- a/test/unit/config/test__base.py
+++ b/test/unit/config/test__base.py
@@ -159,7 +159,7 @@ def test_prepare_env_settings(mocker):
 def test_prepare_env_sshkey_defaults():
     rc = BaseConfig()
     rc.prepare_env()
-    assert rc.ssh_key_data is None
+    assert rc.ssh_key is None
 
 
 def test_prepare_env_sshkey(mocker):
@@ -171,7 +171,7 @@ def test_prepare_env_sshkey(mocker):
 
     mocker.patch.object(rc.loader, 'load_file', side_effect=sshkey_side_effect)
     rc.prepare_env()
-    assert rc.ssh_key_data == rsa_private_key_value
+    assert rc.ssh_key == rsa_private_key_value
 
 
 def test_prepare_env_defaults():
@@ -191,7 +191,7 @@ def test_prepare_env_ansible_vars(mocker, tmp_path):
 
     artifact_dir = tmp_path.joinpath('some_artifacts')
     rc = BaseConfig(artifact_dir=artifact_dir.as_posix())
-    rc.ssh_key_data = None
+    rc.ssh_key = None
     rc.env = {}
     rc.execution_mode = BaseExecutionMode.ANSIBLE_COMMANDS
 
@@ -215,7 +215,7 @@ def test_prepare_with_ssh_key(mocker, tmp_path):
     rc.env = {}
     rc.execution_mode = BaseExecutionMode.ANSIBLE_COMMANDS
     rsa_key = RSAKey()
-    rc.ssh_key_data = rsa_key.private
+    rc.ssh_key = rsa_key.private
     rc.command = 'ansible-playbook'
     rc.cmdline_args = []
     rc.prepare_env()
diff --git a/test/unit/config/test_runner.py b/test/unit/config/test_runner.py
index ea4ca97f..6100d3b7 100644
--- a/test/unit/config/test_runner.py
+++ b/test/unit/config/test_runner.py
@@ -139,7 +139,7 @@ def test_prepare_env_extra_vars_defaults(mocker):
 
     rc = RunnerConfig('/')
     rc.prepare_env()
-    assert rc.extra_vars is None
+    assert rc.extravars is None
 
 
 def test_prepare_env_settings_defaults(mocker):
@@ -169,7 +169,7 @@ def test_prepare_env_sshkey_defaults(mocker):
 
     rc = RunnerConfig('/')
     rc.prepare_env()
-    assert rc.ssh_key_data is None
+    assert rc.ssh_key is None
 
 
 def test_prepare_env_sshkey(mocker):
@@ -184,7 +184,7 @@ def test_prepare_env_sshkey(mocker):
 
     mocker.patch.object(rc.loader, 'load_file', side_effect=sshkey_side_effect)
     rc.prepare_env()
-    assert rc.ssh_key_data == rsa_private_key_value
+    assert rc.ssh_key == rsa_private_key_value
 
 
 def test_prepare_env_defaults(mocker):
@@ -206,7 +206,7 @@ def test_prepare_env_directory_isolation(mocker):
     path_exists.return_value = True
 
     rc = RunnerConfig('/')
-    rc.directory_isolation_path = '/tmp/foo'
+    rc.directory_isolation_base_path = '/tmp/foo'
     rc.prepare_env()
     assert rc.cwd == '/tmp/foo'
 
@@ -235,11 +235,11 @@ def test_prepare_env_directory_isolation_from_settings(mocker, project_fixtures)
     assert os.path.exists(rc.project_dir)
 
     # `directory_isolation_path` should be used to create a new temp path underneath
-    assert rc.directory_isolation_path == '/tmp/runner/runner_di_XYZ'
+    assert rc.directory_isolation_base_path == '/tmp/runner/runner_di_XYZ'
     mkdtemp.assert_called_once_with(prefix='runner_di_', dir='/tmp/runner')
 
     # The project files should be copied to the isolation path.
-    copy_tree.assert_called_once_with(rc.project_dir, rc.directory_isolation_path, dirs_exist_ok=True, symlinks=True)
+    copy_tree.assert_called_once_with(rc.project_dir, rc.directory_isolation_base_path, dirs_exist_ok=True, symlinks=True)
 
 
 def test_prepare_inventory(mocker):
@@ -282,7 +282,7 @@ def test_generate_ansible_command_extra_vars(mocker, extra_vars, expected):
 
     mocker.patch.object(rc.loader, 'isfile', side_effect=lambda x: True)
 
-    rc.extra_vars = extra_vars
+    rc.extravars = extra_vars
     cmd = rc.generate_ansible_command()
     assert cmd == expected
 
@@ -293,15 +293,15 @@ def test_generate_ansible_command(mocker):
 
     rc = RunnerConfig(private_data_dir='/', playbook='main.yaml')
     rc.prepare_inventory()
-    rc.extra_vars = None
+    rc.extravars = None
 
     cmd = rc.generate_ansible_command()
     assert cmd == ['ansible-playbook', '-i', '/inventory', 'main.yaml']
 
-    rc.extra_vars = {'test': 'key'}
+    rc.extravars = {'test': 'key'}
     cmd = rc.generate_ansible_command()
     assert cmd == ['ansible-playbook', '-i', '/inventory', '-e', '{"test":"key"}', 'main.yaml']
 
-    rc.extra_vars = None
+    rc.extravars = None
     rc.inventory = "localhost,"
     cmd = rc.generate_ansible_command()
 
@@ -386,7 +386,7 @@ def test_generate_ansible_command_with_cmdline_args(cmdline, tokens, mocker):
     path_exists.return_value = True
 
     rc.prepare_inventory()
-    rc.extra_vars = {}
+    rc.extravars = {}
 
     cmdline_side_effect = partial(load_file_side_effect, 'env/cmdline', cmdline)
     mocker.patch.object(rc.loader, 'load_file', side_effect=cmdline_side_effect)
@@ -421,7 +421,7 @@ def test_prepare_with_defaults(mocker):
     rc.prepare_env = mocker.Mock()
     rc.prepare_command = mocker.Mock()
 
-    rc.ssh_key_data = None
+    rc.ssh_key = None
     rc.artifact_dir = '/'
     rc.env = {}
 
@@ -440,7 +440,7 @@ def test_prepare(mocker):
    rc = RunnerConfig('/')
     rc.prepare_inventory = mocker.Mock()
     rc.prepare_command = mocker.Mock()
-    rc.ssh_key_data = None
+    rc.ssh_key = None
     rc.artifact_dir = '/'
     rc.env = {}
     rc.execution_mode = ExecutionMode.ANSIBLE_PLAYBOOK
@@ -470,13 +470,13 @@ def test_prepare_with_ssh_key(mocker):
 
     rc.wrap_args_with_ssh_agent = mocker.Mock()
 
-    rc.ssh_key_data = None
+    rc.ssh_key = None
     rc.artifact_dir = '/'
     rc.env = {}
     rc.execution_mode = ExecutionMode.ANSIBLE_PLAYBOOK
     rc.playbook = 'main.yaml'
     rsa_key = RSAKey()
-    rc.ssh_key_data = rsa_key.private
+    rc.ssh_key = rsa_key.private
     rc.command = 'ansible-playbook'
 
     mocker.patch.dict('os.environ', {'AWX_LIB_DIRECTORY': '/'})
@@ -583,7 +583,7 @@ def isfile(self, _):
     rc = RunnerConfig('/')
     rc.artifact_dir = tmp_path / 'artifacts'
-    rc.directory_isolation_path = tmp_path / 'dirisolation'
+    rc.directory_isolation_base_path = tmp_path / 'dirisolation'
     rc.playbook = 'main.yaml'
     rc.command = 'ansible-playbook'
     rc.process_isolation = True
@@ -605,7 +605,7 @@ def isfile(self, _):
         '--symlink', 'usr/lib', '/lib',
         '--symlink', 'usr/lib64', '/lib64',
         '--bind', '/', '/',
-        '--chdir', os.path.realpath(rc.directory_isolation_path),
+        '--chdir', os.path.realpath(rc.directory_isolation_base_path),
         'ansible-playbook', '-i', '/inventory',
         'main.yaml',
     ]