diff --git a/coriolis/api/v1/migration_actions.py b/coriolis/api/v1/deployment_actions.py similarity index 54% rename from coriolis/api/v1/migration_actions.py rename to coriolis/api/v1/deployment_actions.py index 64bd5b7d4..c7f5034f3 100644 --- a/coriolis/api/v1/migration_actions.py +++ b/coriolis/api/v1/deployment_actions.py @@ -1,27 +1,27 @@ -# Copyright 2016 Cloudbase Solutions Srl +# Copyright 2024 Cloudbase Solutions Srl # All Rights Reserved. +from webob import exc + from coriolis.api import wsgi as api_wsgi +from coriolis.deployments import api from coriolis import exception -from coriolis.migrations import api -from coriolis.policies import migrations as migration_policies - -from webob import exc +from coriolis.policies import deployments as deployment_policies -class MigrationActionsController(api_wsgi.Controller): +class DeploymentActionsController(api_wsgi.Controller): def __init__(self): - self._migration_api = api.API() - super(MigrationActionsController, self).__init__() + self._deployment_api = api.API() + super(DeploymentActionsController, self).__init__() @api_wsgi.action('cancel') def _cancel(self, req, id, body): context = req.environ['coriolis.context'] - context.can(migration_policies.get_migrations_policy_label("cancel")) + context.can(deployment_policies.get_deployments_policy_label("cancel")) try: force = (body["cancel"] or {}).get("force", False) - self._migration_api.cancel(context, id, force) + self._deployment_api.cancel(context, id, force) raise exc.HTTPNoContent() except exception.NotFound as ex: raise exc.HTTPNotFound(explanation=ex.msg) @@ -30,4 +30,4 @@ def _cancel(self, req, id, body): def create_resource(): - return api_wsgi.Resource(MigrationActionsController()) + return api_wsgi.Resource(DeploymentActionsController()) diff --git a/coriolis/api/v1/deployments.py b/coriolis/api/v1/deployments.py new file mode 100644 index 000000000..bc682d9fb --- /dev/null +++ b/coriolis/api/v1/deployments.py @@ -0,0 +1,120 @@ +# Copyright 2024 
Cloudbase Solutions Srl +# All Rights Reserved. + +from oslo_config import cfg as conf +from oslo_log import log as logging +from webob import exc + +from coriolis.api.v1 import utils as api_utils +from coriolis.api.v1.views import deployment_view +from coriolis.api import wsgi as api_wsgi +from coriolis.deployments import api +from coriolis.endpoints import api as endpoints_api +from coriolis import exception +from coriolis.policies import deployments as deployment_policies + +DEPLOYMENTS_API_OPTS = [ + conf.BoolOpt("include_task_info_in_deployments_api", + default=False, + help="Whether or not to expose the internal 'info' field of " + "a Deployment as part of a `GET` request.")] + +CONF = conf.CONF +CONF.register_opts(DEPLOYMENTS_API_OPTS, 'api') + +LOG = logging.getLogger(__name__) + + +class DeploymentsController(api_wsgi.Controller): + def __init__(self): + self._deployment_api = api.API() + self._endpoints_api = endpoints_api.API() + super(DeploymentsController, self).__init__() + + def show(self, req, id): + context = req.environ["coriolis.context"] + context.can(deployment_policies.get_deployments_policy_label("show")) + deployment = self._deployment_api.get_deployment( + context, id, + include_task_info=CONF.api.include_task_info_in_deployments_api) + if not deployment: + raise exc.HTTPNotFound() + + return deployment_view.single(deployment) + + def _list(self, req): + show_deleted = api_utils._get_show_deleted( + req.GET.get("show_deleted", None)) + context = req.environ["coriolis.context"] + context.show_deleted = show_deleted + context.can(deployment_policies.get_deployments_policy_label("list")) + return deployment_view.collection( + self._deployment_api.get_deployments( + context, + include_tasks=CONF.api.include_task_info_in_deployments_api, + include_task_info=CONF.api.include_task_info_in_deployments_api + )) + + def index(self, req): + return self._list(req) + + def detail(self, req): + return self._list(req) + + 
@api_utils.format_keyerror_message(resource='deployment', method='create') + def _validate_deployment_input(self, context, body): + deployment = body["deployment"] + + transfer_id = deployment.get("transfer_id", "") + + if not transfer_id: + raise exc.HTTPBadRequest( + explanation="Missing 'transfer_id' field from deployment " + "body. A deployment can be created strictly " + "based on an existing Transfer.") + + clone_disks = deployment.get("clone_disks", True) + force = deployment.get("force", False) + skip_os_morphing = deployment.get("skip_os_morphing", False) + instance_osmorphing_minion_pool_mappings = deployment.get( + 'instance_osmorphing_minion_pool_mappings', {}) + user_scripts = deployment.get('user_scripts', {}) + api_utils.validate_user_scripts(user_scripts) + user_scripts = api_utils.normalize_user_scripts( + user_scripts, deployment.get("instances", [])) + + return ( + transfer_id, force, clone_disks, skip_os_morphing, + instance_osmorphing_minion_pool_mappings, + user_scripts) + + def create(self, req, body): + context = req.environ['coriolis.context'] + context.can(deployment_policies.get_deployments_policy_label("create")) + + (transfer_id, force, clone_disks, skip_os_morphing, + instance_osmorphing_minion_pool_mappings, + user_scripts) = self._validate_deployment_input( + context, body) + + # NOTE: destination environment for transfer should have been + # validated upon its creation. 
+ deployment = self._deployment_api.deploy_transfer_instances( + context, transfer_id, instance_osmorphing_minion_pool_mappings, + clone_disks, force, skip_os_morphing, + user_scripts=user_scripts) + + return deployment_view.single(deployment) + + def delete(self, req, id): + context = req.environ['coriolis.context'] + context.can(deployment_policies.get_deployments_policy_label("delete")) + try: + self._deployment_api.delete(context, id) + raise exc.HTTPNoContent() + except exception.NotFound as ex: + raise exc.HTTPNotFound(explanation=ex.msg) + + +def create_resource(): + return api_wsgi.Resource(DeploymentsController()) diff --git a/coriolis/api/v1/migrations.py b/coriolis/api/v1/migrations.py deleted file mode 100644 index 403b843e8..000000000 --- a/coriolis/api/v1/migrations.py +++ /dev/null @@ -1,189 +0,0 @@ -# Copyright 2016 Cloudbase Solutions Srl -# All Rights Reserved. - -from coriolis.api.v1 import utils as api_utils -from coriolis.api.v1.views import migration_view -from coriolis.api import wsgi as api_wsgi -from coriolis.endpoints import api as endpoints_api -from coriolis import exception -from coriolis.migrations import api -from coriolis.policies import migrations as migration_policies - -from oslo_config import cfg as conf -from oslo_log import log as logging -from webob import exc - - -MIGRATIONS_API_OPTS = [ - conf.BoolOpt("include_task_info_in_migrations_api", - default=False, - help="Whether or not to expose the internal 'info' field of " - "a Migration as part of a `GET` request.")] - -CONF = conf.CONF -CONF.register_opts(MIGRATIONS_API_OPTS, 'api') - -LOG = logging.getLogger(__name__) - - -class MigrationController(api_wsgi.Controller): - def __init__(self): - self._migration_api = api.API() - self._endpoints_api = endpoints_api.API() - super(MigrationController, self).__init__() - - def show(self, req, id): - context = req.environ["coriolis.context"] - context.can(migration_policies.get_migrations_policy_label("show")) - migration = 
self._migration_api.get_migration( - context, id, - include_task_info=CONF.api.include_task_info_in_migrations_api) - if not migration: - raise exc.HTTPNotFound() - - return migration_view.single(migration) - - def _list(self, req): - show_deleted = api_utils._get_show_deleted( - req.GET.get("show_deleted", None)) - context = req.environ["coriolis.context"] - context.show_deleted = show_deleted - context.can(migration_policies.get_migrations_policy_label("list")) - return migration_view.collection( - self._migration_api.get_migrations( - context, - include_tasks=CONF.api.include_task_info_in_migrations_api, - include_task_info=CONF.api.include_task_info_in_migrations_api - )) - - def index(self, req): - return self._list(req) - - def detail(self, req): - return self._list(req) - - @api_utils.format_keyerror_message(resource='migration', method='create') - def _validate_migration_input(self, context, body): - migration = body["migration"] - origin_endpoint_id = migration["origin_endpoint_id"] - destination_endpoint_id = migration["destination_endpoint_id"] - origin_minion_pool_id = migration.get('origin_minion_pool_id') - destination_minion_pool_id = migration.get( - 'destination_minion_pool_id') - instance_osmorphing_minion_pool_mappings = migration.get( - 'instance_osmorphing_minion_pool_mappings', {}) - instances = api_utils.validate_instances_list_for_transfer( - migration.get('instances')) - extras = [ - instance - for instance in instance_osmorphing_minion_pool_mappings - if instance not in instances] - if extras: - raise ValueError( - "One or more instance OSMorphing pool mappings were " - "provided for instances (%s) which are not part of the " - "migration's declared instances (%s)" % (extras, instances)) - - notes = migration.get("notes") - skip_os_morphing = migration.get("skip_os_morphing", False) - shutdown_instances = migration.get( - "shutdown_instances", False) - replication_count = int(migration.get("replication_count", 2)) - if replication_count 
not in range(1, 11): - raise ValueError( - "'replication_count' must be an integer between 1 and 10." - " Got: %s" % replication_count) - - source_environment = migration.get("source_environment", {}) - self._endpoints_api.validate_source_environment( - context, origin_endpoint_id, source_environment) - - network_map = migration.get("network_map", {}) - api_utils.validate_network_map(network_map) - - # TODO(aznashwan): until the provider plugin interface is updated - # to have separate 'network_map' and 'storage_mappings' fields, - # we add them as part of the destination environment: - destination_environment = migration.get( - "destination_environment", {}) - destination_environment['network_map'] = network_map - self._endpoints_api.validate_target_environment( - context, destination_endpoint_id, destination_environment) - - storage_mappings = migration.get("storage_mappings", {}) - api_utils.validate_storage_mappings(storage_mappings) - # NOTE(aznashwan): we validate the destination environment for the - # import provider before appending the 'storage_mappings' parameter - # for plugins with strict property name checks which do not yet - # support storage mapping features: - destination_environment['storage_mappings'] = storage_mappings - - return (origin_endpoint_id, destination_endpoint_id, - origin_minion_pool_id, destination_minion_pool_id, - instance_osmorphing_minion_pool_mappings, source_environment, - destination_environment, instances, notes, - skip_os_morphing, replication_count, - shutdown_instances, network_map, storage_mappings) - - def create(self, req, body): - migration_body = body.get("migration", {}) - context = req.environ['coriolis.context'] - context.can(migration_policies.get_migrations_policy_label("create")) - user_scripts = migration_body.get('user_scripts', {}) - api_utils.validate_user_scripts(user_scripts) - user_scripts = api_utils.normalize_user_scripts( - user_scripts, migration_body.get("instances", [])) - replica_id = 
migration_body.get("replica_id") - if replica_id: - clone_disks = migration_body.get("clone_disks", True) - force = migration_body.get("force", False) - skip_os_morphing = migration_body.get("skip_os_morphing", False) - instance_osmorphing_minion_pool_mappings = migration_body.get( - 'instance_osmorphing_minion_pool_mappings', {}) - - # NOTE: destination environment for replica should have been - # validated upon its creation. - migration = self._migration_api.deploy_replica_instances( - context, replica_id, instance_osmorphing_minion_pool_mappings, - clone_disks, force, skip_os_morphing, - user_scripts=user_scripts) - else: - (origin_endpoint_id, - destination_endpoint_id, - origin_minion_pool_id, - destination_minion_pool_id, - instance_osmorphing_minion_pool_mappings, - source_environment, - destination_environment, - instances, - notes, - skip_os_morphing, - replication_count, - shutdown_instances, - network_map, - storage_mappings) = self._validate_migration_input( - context, body) - migration = self._migration_api.migrate_instances( - context, origin_endpoint_id, destination_endpoint_id, - origin_minion_pool_id, destination_minion_pool_id, - instance_osmorphing_minion_pool_mappings, - source_environment, destination_environment, instances, - network_map, storage_mappings, replication_count, - shutdown_instances, notes=notes, - skip_os_morphing=skip_os_morphing, - user_scripts=user_scripts) - - return migration_view.single(migration) - - def delete(self, req, id): - context = req.environ['coriolis.context'] - context.can(migration_policies.get_migrations_policy_label("delete")) - try: - self._migration_api.delete(context, id) - raise exc.HTTPNoContent() - except exception.NotFound as ex: - raise exc.HTTPNotFound(explanation=ex.msg) - - -def create_resource(): - return api_wsgi.Resource(MigrationController()) diff --git a/coriolis/api/v1/replica_tasks_executions.py b/coriolis/api/v1/replica_tasks_executions.py deleted file mode 100644 index b755e4866..000000000 
--- a/coriolis/api/v1/replica_tasks_executions.py +++ /dev/null @@ -1,74 +0,0 @@ -# Copyright 2016 Cloudbase Solutions Srl -# All Rights Reserved. - -from coriolis.api.v1.views import replica_tasks_execution_view -from coriolis.api import wsgi as api_wsgi -from coriolis import exception -from coriolis.policies import replica_tasks_executions as executions_policies -from coriolis.replica_tasks_executions import api - -from webob import exc - - -class ReplicaTasksExecutionController(api_wsgi.Controller): - def __init__(self): - self._replica_tasks_execution_api = api.API() - super(ReplicaTasksExecutionController, self).__init__() - - def show(self, req, replica_id, id): - context = req.environ["coriolis.context"] - context.can( - executions_policies.get_replica_executions_policy_label("show")) - execution = self._replica_tasks_execution_api.get_execution( - context, replica_id, id) - if not execution: - raise exc.HTTPNotFound() - - return replica_tasks_execution_view.single(execution) - - def index(self, req, replica_id): - context = req.environ["coriolis.context"] - context.can( - executions_policies.get_replica_executions_policy_label("list")) - - return replica_tasks_execution_view.collection( - self._replica_tasks_execution_api.get_executions( - context, replica_id, include_tasks=False)) - - def detail(self, req, replica_id): - context = req.environ["coriolis.context"] - context.can( - executions_policies.get_replica_executions_policy_label("show")) - - return replica_tasks_execution_view.collection( - self._replica_tasks_execution_api.get_executions( - context, replica_id, include_tasks=True)) - - def create(self, req, replica_id, body): - context = req.environ["coriolis.context"] - context.can( - executions_policies.get_replica_executions_policy_label("create")) - - # TODO(alexpilotti): validate body - - execution_body = body.get("execution", {}) - shutdown_instances = execution_body.get("shutdown_instances", False) - - return 
replica_tasks_execution_view.single( - self._replica_tasks_execution_api.create( - context, replica_id, shutdown_instances)) - - def delete(self, req, replica_id, id): - context = req.environ["coriolis.context"] - context.can( - executions_policies.get_replica_executions_policy_label("delete")) - - try: - self._replica_tasks_execution_api.delete(context, replica_id, id) - raise exc.HTTPNoContent() - except exception.NotFound as ex: - raise exc.HTTPNotFound(explanation=ex.msg) - - -def create_resource(): - return api_wsgi.Resource(ReplicaTasksExecutionController()) diff --git a/coriolis/api/v1/router.py b/coriolis/api/v1/router.py index 94286f234..352979571 100644 --- a/coriolis/api/v1/router.py +++ b/coriolis/api/v1/router.py @@ -4,6 +4,8 @@ from oslo_log import log as logging from coriolis import api +from coriolis.api.v1 import deployment_actions +from coriolis.api.v1 import deployments from coriolis.api.v1 import diagnostics from coriolis.api.v1 import endpoint_actions from coriolis.api.v1 import endpoint_destination_minion_pool_options @@ -14,19 +16,17 @@ from coriolis.api.v1 import endpoint_source_options from coriolis.api.v1 import endpoint_storage from coriolis.api.v1 import endpoints -from coriolis.api.v1 import migration_actions -from coriolis.api.v1 import migrations from coriolis.api.v1 import minion_pool_actions from coriolis.api.v1 import minion_pools from coriolis.api.v1 import provider_schemas from coriolis.api.v1 import providers from coriolis.api.v1 import regions -from coriolis.api.v1 import replica_actions -from coriolis.api.v1 import replica_schedules -from coriolis.api.v1 import replica_tasks_execution_actions -from coriolis.api.v1 import replica_tasks_executions -from coriolis.api.v1 import replicas from coriolis.api.v1 import services +from coriolis.api.v1 import transfer_actions +from coriolis.api.v1 import transfer_schedules +from coriolis.api.v1 import transfer_tasks_execution_actions +from coriolis.api.v1 import transfer_tasks_executions 
+from coriolis.api.v1 import transfers LOG = logging.getLogger(__name__) @@ -139,59 +139,61 @@ def _setup_routes(self, mapper, ext_mgr): 'providers/{platform_name}/schemas/{provider_type}', controller=self.resources['provider_schemas']) - self.resources['migrations'] = migrations.create_resource() - mapper.resource('migration', 'migrations', - controller=self.resources['migrations'], + self.resources['deployments'] = deployments.create_resource() + mapper.resource('deployment', 'deployments', + controller=self.resources['deployments'], collection={'detail': 'GET'}, member={'action': 'POST'}) - migration_actions_resource = migration_actions.create_resource() - self.resources['migration_actions'] = migration_actions_resource - migration_path = '/{project_id}/migrations/{id}' - mapper.connect('migration_actions', - migration_path + '/actions', - controller=self.resources['migration_actions'], + deployments_actions_resource = deployment_actions.create_resource() + self.resources['deployment_actions'] = deployments_actions_resource + deployment_path = '/{project_id}/deployments/{id}' + mapper.connect('deployment_actions', + deployment_path + '/actions', + controller=self.resources['deployment_actions'], action='action', conditions={'method': 'POST'}) - self.resources['replicas'] = replicas.create_resource() - mapper.resource('replica', 'replicas', - controller=self.resources['replicas'], + self.resources['transfers'] = transfers.create_resource() + mapper.resource('transfer', 'transfers', + controller=self.resources['transfers'], collection={'detail': 'GET'}, member={'action': 'POST'}) - replica_actions_resource = replica_actions.create_resource() - self.resources['replica_actions'] = replica_actions_resource - migration_path = '/{project_id}/replicas/{id}' - mapper.connect('replica_actions', + transfer_actions_resource = transfer_actions.create_resource() + self.resources['transfer_actions'] = transfer_actions_resource + migration_path = '/{project_id}/transfers/{id}' 
+ mapper.connect('transfer_actions', migration_path + '/actions', - controller=self.resources['replica_actions'], + controller=self.resources['transfer_actions'], action='action', conditions={'method': 'POST'}) - self.resources['replica_tasks_executions'] = \ - replica_tasks_executions.create_resource() - mapper.resource('execution', 'replicas/{replica_id}/executions', - controller=self.resources['replica_tasks_executions'], + self.resources['transfer_tasks_executions'] = \ + transfer_tasks_executions.create_resource() + mapper.resource('execution', 'transfers/{transfer_id}/executions', + controller=self.resources['transfer_tasks_executions'], collection={'detail': 'GET'}, member={'action': 'POST'}) - replica_tasks_execution_actions_resource = \ - replica_tasks_execution_actions.create_resource() - self.resources['replica_tasks_execution_actions'] = \ - replica_tasks_execution_actions_resource - migration_path = '/{project_id}/replicas/{replica_id}/executions/{id}' - mapper.connect('replica_tasks_execution_actions', + transfer_tasks_execution_actions_resource = \ + transfer_tasks_execution_actions.create_resource() + self.resources['transfer_tasks_execution_actions'] = \ + transfer_tasks_execution_actions_resource + migration_path = ('/{project_id}/transfers/{transfer_id}/' + 'executions/{id}') + mapper.connect('transfer_tasks_execution_actions', migration_path + '/actions', controller=self.resources[ - 'replica_tasks_execution_actions'], + 'transfer_tasks_execution_actions'], action='action', conditions={'method': 'POST'}) - sched = replica_schedules.create_resource() - self.resources['replica_schedules'] = sched - mapper.resource('replica_schedule', 'replicas/{replica_id}/schedules', - controller=self.resources['replica_schedules'], + sched = transfer_schedules.create_resource() + self.resources['transfer_schedules'] = sched + mapper.resource('transfer_schedule', + 'transfers/{transfer_id}/schedules', + controller=self.resources['transfer_schedules'], 
collection={'index': 'GET'}, member={'action': 'POST'}) diff --git a/coriolis/api/v1/replica_actions.py b/coriolis/api/v1/transfer_actions.py similarity index 50% rename from coriolis/api/v1/replica_actions.py rename to coriolis/api/v1/transfer_actions.py index adb82a26f..487e00994 100644 --- a/coriolis/api/v1/replica_actions.py +++ b/coriolis/api/v1/transfer_actions.py @@ -1,28 +1,28 @@ # Copyright 2016 Cloudbase Solutions Srl # All Rights Reserved. -from coriolis.api.v1.views import replica_tasks_execution_view +from coriolis.api.v1.views import transfer_tasks_execution_view from coriolis.api import wsgi as api_wsgi from coriolis import exception -from coriolis.policies import replicas as replica_policies -from coriolis.replicas import api +from coriolis.policies import transfers as transfer_policies +from coriolis.transfers import api from webob import exc -class ReplicaActionsController(api_wsgi.Controller): +class TransferActionsController(api_wsgi.Controller): def __init__(self): - self._replica_api = api.API() - super(ReplicaActionsController, self).__init__() + self._transfer_api = api.API() + super(TransferActionsController, self).__init__() @api_wsgi.action('delete-disks') def _delete_disks(self, req, id, body): context = req.environ['coriolis.context'] context.can( - replica_policies.get_replicas_policy_label("delete_disks")) + transfer_policies.get_transfers_policy_label("delete_disks")) try: - return replica_tasks_execution_view.single( - self._replica_api.delete_disks(context, id)) + return transfer_tasks_execution_view.single( + self._transfer_api.delete_disks(context, id)) except exception.NotFound as ex: raise exc.HTTPNotFound(explanation=ex.msg) except exception.InvalidParameterValue as ex: @@ -30,4 +30,4 @@ def _delete_disks(self, req, id, body): def create_resource(): - return api_wsgi.Resource(ReplicaActionsController()) + return api_wsgi.Resource(TransferActionsController()) diff --git a/coriolis/api/v1/replica_schedules.py 
b/coriolis/api/v1/transfer_schedules.py similarity index 68% rename from coriolis/api/v1/replica_schedules.py rename to coriolis/api/v1/transfer_schedules.py index 32a3e1d02..2ea70ce9f 100644 --- a/coriolis/api/v1/replica_schedules.py +++ b/coriolis/api/v1/transfer_schedules.py @@ -1,12 +1,12 @@ # Copyright 2017 Cloudbase Solutions Srl # All Rights Reserved. -from coriolis.api.v1.views import replica_schedule_view +from coriolis.api.v1.views import transfer_schedule_view from coriolis.api import wsgi as api_wsgi from coriolis import exception -from coriolis.policies import replica_schedules as schedules_policies -from coriolis.replica_cron import api +from coriolis.policies import transfer_schedules as schedules_policies from coriolis import schemas +from coriolis.transfer_cron import api import jsonschema from oslo_log import log as logging @@ -18,31 +18,31 @@ LOG = logging.getLogger(__name__) -class ReplicaScheduleController(api_wsgi.Controller): +class TransferScheduleController(api_wsgi.Controller): def __init__(self): self._schedule_api = api.API() - super(ReplicaScheduleController, self).__init__() + super(TransferScheduleController, self).__init__() - def show(self, req, replica_id, id): + def show(self, req, transfer_id, id): context = req.environ["coriolis.context"] context.can( - schedules_policies.get_replica_schedules_policy_label("show")) - schedule = self._schedule_api.get_schedule(context, replica_id, id) + schedules_policies.get_transfer_schedules_policy_label("show")) + schedule = self._schedule_api.get_schedule(context, transfer_id, id) if not schedule: raise exc.HTTPNotFound() - return replica_schedule_view.single(schedule) + return transfer_schedule_view.single(schedule) - def index(self, req, replica_id): + def index(self, req, transfer_id): context = req.environ["coriolis.context"] context.can( - schedules_policies.get_replica_schedules_policy_label("list")) + schedules_policies.get_transfer_schedules_policy_label("list")) show_expired = 
strutils.bool_from_string( req.GET.get("show_expired", True), strict=True) - return replica_schedule_view.collection( + return transfer_schedule_view.collection( self._schedule_api.get_schedules( - context, replica_id, expired=show_expired)) + context, transfer_id, expired=show_expired)) def _validate_schedule(self, schedule): schema = schemas.SCHEDULE_API_BODY_SCHEMA["properties"]["schedule"] @@ -100,45 +100,45 @@ def _validate_update_body(self, update_body): body["expiration_date"] = exp return body - def create(self, req, replica_id, body): + def create(self, req, transfer_id, body): context = req.environ["coriolis.context"] context.can( - schedules_policies.get_replica_schedules_policy_label("create")) + schedules_policies.get_transfer_schedules_policy_label("create")) - LOG.debug("Got request: %r %r %r" % (req, replica_id, body)) + LOG.debug("Got request: %r %r %r" % (req, transfer_id, body)) try: schedule, enabled, exp_date, shutdown = self._validate_create_body( body) except Exception as err: raise exception.InvalidInput(err) - return replica_schedule_view.single(self._schedule_api.create( - context, replica_id, schedule, enabled, exp_date, shutdown)) + return transfer_schedule_view.single(self._schedule_api.create( + context, transfer_id, schedule, enabled, exp_date, shutdown)) - def update(self, req, replica_id, id, body): + def update(self, req, transfer_id, id, body): context = req.environ["coriolis.context"] context.can( - schedules_policies.get_replica_schedules_policy_label("update")) + schedules_policies.get_transfer_schedules_policy_label("update")) LOG.debug("Got request: %r %r %r %r" % ( - req, replica_id, id, body)) + req, transfer_id, id, body)) try: update_values = self._validate_update_body(body) except Exception as err: raise exception.InvalidInput(err) - return replica_schedule_view.single(self._schedule_api.update( - context, replica_id, id, update_values)) + return transfer_schedule_view.single(self._schedule_api.update( + context, 
transfer_id, id, update_values)) - def delete(self, req, replica_id, id): + def delete(self, req, transfer_id, id): context = req.environ["coriolis.context"] context.can( - schedules_policies.get_replica_schedules_policy_label("delete")) + schedules_policies.get_transfer_schedules_policy_label("delete")) - self._schedule_api.delete(context, replica_id, id) + self._schedule_api.delete(context, transfer_id, id) raise exc.HTTPNoContent() def create_resource(): - return api_wsgi.Resource(ReplicaScheduleController()) + return api_wsgi.Resource(TransferScheduleController()) diff --git a/coriolis/api/v1/replica_tasks_execution_actions.py b/coriolis/api/v1/transfer_tasks_execution_actions.py similarity index 50% rename from coriolis/api/v1/replica_tasks_execution_actions.py rename to coriolis/api/v1/transfer_tasks_execution_actions.py index b7bcca857..27998046d 100644 --- a/coriolis/api/v1/replica_tasks_execution_actions.py +++ b/coriolis/api/v1/transfer_tasks_execution_actions.py @@ -5,25 +5,25 @@ from coriolis.api import wsgi as api_wsgi from coriolis import exception -from coriolis.policies import replica_tasks_executions as execution_policies -from coriolis.replica_tasks_executions import api +from coriolis.policies import transfer_tasks_executions as execution_policies +from coriolis.transfer_tasks_executions import api -class ReplicaTasksExecutionActionsController(api_wsgi.Controller): +class TransferTasksExecutionActionsController(api_wsgi.Controller): def __init__(self): - self._replica_tasks_execution_api = api.API() - super(ReplicaTasksExecutionActionsController, self).__init__() + self._transfer_tasks_execution_api = api.API() + super(TransferTasksExecutionActionsController, self).__init__() @api_wsgi.action('cancel') - def _cancel(self, req, replica_id, id, body): + def _cancel(self, req, transfer_id, id, body): context = req.environ['coriolis.context'] context.can( - execution_policies.get_replica_executions_policy_label('cancel')) + 
execution_policies.get_transfer_executions_policy_label('cancel')) try: force = (body["cancel"] or {}).get("force", False) - self._replica_tasks_execution_api.cancel( - context, replica_id, id, force) + self._transfer_tasks_execution_api.cancel( + context, transfer_id, id, force) raise exc.HTTPNoContent() except exception.NotFound as ex: raise exc.HTTPNotFound(explanation=ex.msg) @@ -32,4 +32,4 @@ def _cancel(self, req, replica_id, id, body): def create_resource(): - return api_wsgi.Resource(ReplicaTasksExecutionActionsController()) + return api_wsgi.Resource(TransferTasksExecutionActionsController()) diff --git a/coriolis/api/v1/transfer_tasks_executions.py b/coriolis/api/v1/transfer_tasks_executions.py new file mode 100644 index 000000000..68f029e2e --- /dev/null +++ b/coriolis/api/v1/transfer_tasks_executions.py @@ -0,0 +1,74 @@ +# Copyright 2016 Cloudbase Solutions Srl +# All Rights Reserved. + +from coriolis.api.v1.views import transfer_tasks_execution_view +from coriolis.api import wsgi as api_wsgi +from coriolis import exception +from coriolis.policies import transfer_tasks_executions as executions_policies +from coriolis.transfer_tasks_executions import api + +from webob import exc + + +class TransferTasksExecutionController(api_wsgi.Controller): + def __init__(self): + self._transfer_tasks_execution_api = api.API() + super(TransferTasksExecutionController, self).__init__() + + def show(self, req, transfer_id, id): + context = req.environ["coriolis.context"] + context.can( + executions_policies.get_transfer_executions_policy_label("show")) + execution = self._transfer_tasks_execution_api.get_execution( + context, transfer_id, id) + if not execution: + raise exc.HTTPNotFound() + + return transfer_tasks_execution_view.single(execution) + + def index(self, req, transfer_id): + context = req.environ["coriolis.context"] + context.can( + executions_policies.get_transfer_executions_policy_label("list")) + + return transfer_tasks_execution_view.collection( + 
self._transfer_tasks_execution_api.get_executions( + context, transfer_id, include_tasks=False)) + + def detail(self, req, transfer_id): + context = req.environ["coriolis.context"] + context.can( + executions_policies.get_transfer_executions_policy_label("show")) + + return transfer_tasks_execution_view.collection( + self._transfer_tasks_execution_api.get_executions( + context, transfer_id, include_tasks=True)) + + def create(self, req, transfer_id, body): + context = req.environ["coriolis.context"] + context.can( + executions_policies.get_transfer_executions_policy_label("create")) + + # TODO(alexpilotti): validate body + + execution_body = body.get("execution", {}) + shutdown_instances = execution_body.get("shutdown_instances", False) + + return transfer_tasks_execution_view.single( + self._transfer_tasks_execution_api.create( + context, transfer_id, shutdown_instances)) + + def delete(self, req, transfer_id, id): + context = req.environ["coriolis.context"] + context.can( + executions_policies.get_transfer_executions_policy_label("delete")) + + try: + self._transfer_tasks_execution_api.delete(context, transfer_id, id) + raise exc.HTTPNoContent() + except exception.NotFound as ex: + raise exc.HTTPNotFound(explanation=ex.msg) + + +def create_resource(): + return api_wsgi.Resource(TransferTasksExecutionController()) diff --git a/coriolis/api/v1/replicas.py b/coriolis/api/v1/transfers.py similarity index 67% rename from coriolis/api/v1/replicas.py rename to coriolis/api/v1/transfers.py index bf5c6570c..511cfd2d8 100644 --- a/coriolis/api/v1/replicas.py +++ b/coriolis/api/v1/transfers.py @@ -2,56 +2,61 @@ # All Rights Reserved. 
from coriolis.api.v1 import utils as api_utils -from coriolis.api.v1.views import replica_tasks_execution_view -from coriolis.api.v1.views import replica_view +from coriolis.api.v1.views import transfer_tasks_execution_view +from coriolis.api.v1.views import transfer_view from coriolis.api import wsgi as api_wsgi +from coriolis import constants from coriolis.endpoints import api as endpoints_api from coriolis import exception -from coriolis.policies import replicas as replica_policies -from coriolis.replicas import api +from coriolis.policies import transfers as transfer_policies +from coriolis.transfers import api from oslo_config import cfg as conf from oslo_log import log as logging from webob import exc -REPLICA_API_OPTS = [ - conf.BoolOpt("include_task_info_in_replicas_api", +TRANSFER_API_OPTS = [ + conf.BoolOpt("include_task_info_in_transfers_api", default=False, help="Whether or not to expose the internal 'info' field of " - "a Replica as part of a `GET` request.")] + "a Transfer as part of a `GET` request.")] CONF = conf.CONF -CONF.register_opts(REPLICA_API_OPTS, 'api') +CONF.register_opts(TRANSFER_API_OPTS, 'api') LOG = logging.getLogger(__name__) +SUPPORTED_TRANSFER_SCENARIOS = [ + constants.TRANSFER_SCENARIO_REPLICA, + constants.TRANSFER_SCENARIO_LIVE_MIGRATION] -class ReplicaController(api_wsgi.Controller): + +class TransferController(api_wsgi.Controller): def __init__(self): - self._replica_api = api.API() + self._transfer_api = api.API() self._endpoints_api = endpoints_api.API() - super(ReplicaController, self).__init__() + super(TransferController, self).__init__() def show(self, req, id): context = req.environ["coriolis.context"] - context.can(replica_policies.get_replicas_policy_label("show")) - replica = self._replica_api.get_replica( + context.can(transfer_policies.get_transfers_policy_label("show")) + transfer = self._transfer_api.get_transfer( context, id, - include_task_info=CONF.api.include_task_info_in_replicas_api) - if not replica: + 
include_task_info=CONF.api.include_task_info_in_transfers_api) + if not transfer: raise exc.HTTPNotFound() - return replica_view.single(replica) + return transfer_view.single(transfer) def _list(self, req): show_deleted = api_utils._get_show_deleted( req.GET.get("show_deleted", None)) context = req.environ["coriolis.context"] context.show_deleted = show_deleted - context.can(replica_policies.get_replicas_policy_label("list")) - include_task_info = CONF.api.include_task_info_in_replicas_api - return replica_view.collection( - self._replica_api.get_replicas( + context.can(transfer_policies.get_transfers_policy_label("list")) + include_task_info = CONF.api.include_task_info_in_transfers_api + return transfer_view.collection( + self._transfer_api.get_transfers( context, include_tasks_executions=include_task_info, include_task_info=include_task_info)) @@ -62,28 +67,41 @@ def index(self, req): def detail(self, req): return self._list(req) - @api_utils.format_keyerror_message(resource='replica', method='create') + @api_utils.format_keyerror_message(resource='transfer', method='create') def _validate_create_body(self, context, body): - replica = body["replica"] - - origin_endpoint_id = replica["origin_endpoint_id"] - destination_endpoint_id = replica["destination_endpoint_id"] - destination_environment = replica.get( + transfer = body["transfer"] + + scenario = transfer.get("scenario", "") + if scenario: + if scenario not in SUPPORTED_TRANSFER_SCENARIOS: + raise exc.HTTPBadRequest( + explanation=f"Unsupported Transfer creation scenario " + f"'{scenario}', must be one of: " + f"{SUPPORTED_TRANSFER_SCENARIOS}") + else: + scenario = constants.TRANSFER_SCENARIO_REPLICA + LOG.warn( + "No Transfer 'scenario' field set in Transfer body, " + f"defaulting to: '{scenario}'") + + origin_endpoint_id = transfer["origin_endpoint_id"] + destination_endpoint_id = transfer["destination_endpoint_id"] + destination_environment = transfer.get( "destination_environment", {}) instances = 
api_utils.validate_instances_list_for_transfer( - replica.get('instances')) + transfer.get('instances')) - notes = replica.get("notes") + notes = transfer.get("notes") - source_environment = replica.get("source_environment", {}) + source_environment = transfer.get("source_environment", {}) self._endpoints_api.validate_source_environment( context, origin_endpoint_id, source_environment) - origin_minion_pool_id = replica.get( + origin_minion_pool_id = transfer.get( 'origin_minion_pool_id') - destination_minion_pool_id = replica.get( + destination_minion_pool_id = transfer.get( 'destination_minion_pool_id') - instance_osmorphing_minion_pool_mappings = replica.get( + instance_osmorphing_minion_pool_mappings = transfer.get( 'instance_osmorphing_minion_pool_mappings', {}) extras = [ instance @@ -93,18 +111,18 @@ def _validate_create_body(self, context, body): raise ValueError( "One or more instance OSMorphing pool mappings were " "provided for instances (%s) which are not part of the " - "Replicas's declared instances (%s)" % (extras, instances)) + "Transfer's declared instances (%s)" % (extras, instances)) # TODO(aznashwan): until the provider plugin interface is updated # to have separate 'network_map' and 'storage_mappings' fields, # we add them as part of the destination environment: - network_map = replica.get("network_map", {}) + network_map = transfer.get("network_map", {}) api_utils.validate_network_map(network_map) destination_environment['network_map'] = network_map self._endpoints_api.validate_target_environment( context, destination_endpoint_id, destination_environment) - user_scripts = replica.get('user_scripts', {}) + user_scripts = transfer.get('user_scripts', {}) api_utils.validate_user_scripts(user_scripts) user_scripts = api_utils.normalize_user_scripts( user_scripts, instances) @@ -113,12 +131,12 @@ def _validate_create_body(self, context, body): # import provider before appending the 'storage_mappings' parameter # for plugins with strict property name 
checks which do not yet # support storage mapping features: - storage_mappings = replica.get("storage_mappings", {}) + storage_mappings = transfer.get("storage_mappings", {}) api_utils.validate_storage_mappings(storage_mappings) destination_environment['storage_mappings'] = storage_mappings - return (origin_endpoint_id, destination_endpoint_id, + return (scenario, origin_endpoint_id, destination_endpoint_id, source_environment, destination_environment, instances, network_map, storage_mappings, notes, origin_minion_pool_id, destination_minion_pool_id, @@ -126,17 +144,17 @@ def _validate_create_body(self, context, body): def create(self, req, body): context = req.environ["coriolis.context"] - context.can(replica_policies.get_replicas_policy_label("create")) + context.can(transfer_policies.get_transfers_policy_label("create")) - (origin_endpoint_id, destination_endpoint_id, + (scenario, origin_endpoint_id, destination_endpoint_id, source_environment, destination_environment, instances, network_map, storage_mappings, notes, origin_minion_pool_id, destination_minion_pool_id, instance_osmorphing_minion_pool_mappings, user_scripts) = ( self._validate_create_body(context, body)) - return replica_view.single(self._replica_api.create( - context, origin_endpoint_id, destination_endpoint_id, + return transfer_view.single(self._transfer_api.create( + context, scenario, origin_endpoint_id, destination_endpoint_id, origin_minion_pool_id, destination_minion_pool_id, instance_osmorphing_minion_pool_mappings, source_environment, destination_environment, instances, network_map, @@ -144,9 +162,9 @@ def create(self, req, body): def delete(self, req, id): context = req.environ["coriolis.context"] - context.can(replica_policies.get_replicas_policy_label("delete")) + context.can(transfer_policies.get_transfers_policy_label("delete")) try: - self._replica_api.delete(context, id) + self._transfer_api.delete(context, id) raise exc.HTTPNoContent() except exception.NotFound as ex: raise 
exc.HTTPNotFound(explanation=ex.msg) @@ -216,8 +234,8 @@ def _get_updated_user_scripts(original_user_scripts, new_user_scripts): return user_scripts - def _get_merged_replica_values(self, replica, updated_values): - """ Looks for the following keys in the original replica body and + def _get_merged_transfer_values(self, transfer, updated_values): + """ Looks for the following keys in the original transfer body and updated values (preferring the updated values where needed, but using `.update()` on dicts): "source_environment", "destination_environment", "network_map", "notes" @@ -231,9 +249,9 @@ def _get_merged_replica_values(self, replica, updated_values): for option in [ "source_environment", "destination_environment", "network_map"]: - before = replica.get(option) + before = transfer.get(option) after = updated_values.get(option) - # NOTE: for Replicas created before the separation of these fields + # NOTE: for Transfers created before the separation of these fields # in the DB there is the chance that some of these may be NULL: if before is None: before = {} @@ -243,7 +261,7 @@ def _get_merged_replica_values(self, replica, updated_values): final_values[option] = before - original_storage_mappings = replica.get('storage_mappings') + original_storage_mappings = transfer.get('storage_mappings') if original_storage_mappings is None: original_storage_mappings = {} new_storage_mappings = updated_values.get('storage_mappings') @@ -253,7 +271,7 @@ def _get_merged_replica_values(self, replica, updated_values): original_storage_mappings, new_storage_mappings) original_user_scripts = api_utils.validate_user_scripts( - replica.get('user_scripts', {})) + transfer.get('user_scripts', {})) new_user_scripts = api_utils.validate_user_scripts( updated_values.get('user_scripts', {})) final_values['user_scripts'] = self._get_updated_user_scripts( @@ -262,7 +280,7 @@ def _get_merged_replica_values(self, replica, updated_values): if 'notes' in updated_values: final_values['notes'] = 
updated_values.get('notes', '') else: - final_values['notes'] = replica.get('notes', '') + final_values['notes'] = transfer.get('notes', '') # NOTE: until the provider plugin interface is updated # to have separate 'network_map' and 'storage_mappings' fields, @@ -286,41 +304,48 @@ def _get_merged_replica_values(self, replica, updated_values): return final_values - @api_utils.format_keyerror_message(resource='replica', method='update') + @api_utils.format_keyerror_message(resource='transfer', method='update') def _validate_update_body(self, id, context, body): + transfer = self._transfer_api.get_transfer(context, id) + + scenario = body.get("scenario", "") + if scenario and scenario != transfer["scenario"]: + raise exc.HTTPBadRequest( + explanation=f"Changing Transfer creation scenario is not " + f"supported (original scenario is " + f"{transfer['scenario']}, received '{scenario}')") - replica = self._replica_api.get_replica(context, id) - replica_body = body['replica'] - origin_endpoint_id = replica_body.get('origin_endpoint_id', None) - destination_endpoint_id = replica_body.get( + transfer_body = body['transfer'] + origin_endpoint_id = transfer_body.get('origin_endpoint_id', None) + destination_endpoint_id = transfer_body.get( 'destination_endpoint_id', None) - instances = body['replica'].get('instances', None) + instances = body['transfer'].get('instances', None) if origin_endpoint_id or destination_endpoint_id: raise exc.HTTPBadRequest( explanation="The source or destination endpoints for a " - "Coriolis Replica cannot be updated after its " + "Coriolis Transfer cannot be updated after its " "creation. 
If the credentials of any of the " - "Replica's endpoints need updating, please update " - "the endpoints themselves.") + "Transfer's endpoints need updating, please " + "update the endpoints themselves.") if instances: raise exc.HTTPBadRequest( - explanation="The list of instances of a Replica cannot be " + explanation="The list of instances of a Transfer cannot be " "updated") - merged_body = self._get_merged_replica_values( - replica, replica_body) + merged_body = self._get_merged_transfer_values( + transfer, transfer_body) - replica_origin_endpoint_id = replica["origin_endpoint_id"] - replica_destination_endpoint_id = replica[ + transfer_origin_endpoint_id = transfer["origin_endpoint_id"] + transfer_destination_endpoint_id = transfer[ "destination_endpoint_id"] self._endpoints_api.validate_source_environment( - context, replica_origin_endpoint_id, + context, transfer_origin_endpoint_id, merged_body["source_environment"]) destination_environment = merged_body["destination_environment"] self._endpoints_api.validate_target_environment( - context, replica_destination_endpoint_id, + context, transfer_destination_endpoint_id, destination_environment) api_utils.validate_network_map(merged_body["network_map"]) @@ -331,19 +356,19 @@ def _validate_update_body(self, id, context, body): user_scripts = merged_body['user_scripts'] api_utils.validate_user_scripts(user_scripts) merged_body['user_scripts'] = api_utils.normalize_user_scripts( - user_scripts, replica.get('instances', [])) + user_scripts, transfer.get('instances', [])) return merged_body def update(self, req, id, body): context = req.environ["coriolis.context"] - context.can(replica_policies.get_replicas_policy_label("update")) + context.can(transfer_policies.get_transfers_policy_label("update")) updated_values = self._validate_update_body(id, context, body) try: - return replica_tasks_execution_view.single( - self._replica_api.update(req.environ['coriolis.context'], - id, updated_values)) + return 
transfer_tasks_execution_view.single( + self._transfer_api.update(req.environ['coriolis.context'], + id, updated_values)) except exception.NotFound as ex: raise exc.HTTPNotFound(explanation=ex.msg) except exception.InvalidParameterValue as ex: @@ -351,4 +376,4 @@ def update(self, req, id, body): def create_resource(): - return api_wsgi.Resource(ReplicaController()) + return api_wsgi.Resource(TransferController()) diff --git a/coriolis/api/v1/views/deployment_view.py b/coriolis/api/v1/views/deployment_view.py new file mode 100644 index 000000000..1a32f562f --- /dev/null +++ b/coriolis/api/v1/views/deployment_view.py @@ -0,0 +1,32 @@ +# Copyright 2024 Cloudbase Solutions Srl +# All Rights Reserved. + +from coriolis.api.v1.views import transfer_tasks_execution_view as view +from coriolis.api.v1.views import utils as view_utils + + +def _format_deployment(deployment, keys=None): + deployment_dict = view_utils.format_opt(deployment, keys) + + if len(deployment_dict.get("executions", [])): + execution = view.format_transfer_tasks_execution( + deployment_dict["executions"][0], keys) + del deployment_dict["executions"] + else: + execution = {} + + tasks = execution.get("tasks") + if tasks: + deployment_dict["tasks"] = tasks + + return deployment_dict + + +def single(deployment, keys=None): + return {"deployment": _format_deployment(deployment, keys)} + + +def collection(deployments, keys=None): + formatted_deployments = [_format_deployment(m, keys) + for m in deployments] + return {'deployments': formatted_deployments} diff --git a/coriolis/api/v1/views/migration_view.py b/coriolis/api/v1/views/migration_view.py deleted file mode 100644 index f130a3df8..000000000 --- a/coriolis/api/v1/views/migration_view.py +++ /dev/null @@ -1,32 +0,0 @@ -# Copyright 2016 Cloudbase Solutions Srl -# All Rights Reserved. 
- -from coriolis.api.v1.views import replica_tasks_execution_view as view -from coriolis.api.v1.views import utils as view_utils - - -def _format_migration(migration, keys=None): - migration_dict = view_utils.format_opt(migration, keys) - - if len(migration_dict.get("executions", [])): - execution = view.format_replica_tasks_execution( - migration_dict["executions"][0], keys) - del migration_dict["executions"] - else: - execution = {} - - tasks = execution.get("tasks") - if tasks: - migration_dict["tasks"] = tasks - - return migration_dict - - -def single(migration, keys=None): - return {"migration": _format_migration(migration, keys)} - - -def collection(migrations, keys=None): - formatted_migrations = [_format_migration(m, keys) - for m in migrations] - return {'migrations': formatted_migrations} diff --git a/coriolis/api/v1/views/replica_view.py b/coriolis/api/v1/views/replica_view.py deleted file mode 100644 index abc38f2da..000000000 --- a/coriolis/api/v1/views/replica_view.py +++ /dev/null @@ -1,26 +0,0 @@ -# Copyright 2016 Cloudbase Solutions Srl -# All Rights Reserved. 
- -from coriolis.api.v1.views import replica_tasks_execution_view as view -from coriolis.api.v1.views import utils as view_utils - - -def _format_replica(replica, keys=None): - replica_dict = view_utils.format_opt(replica, keys) - - executions = replica_dict.get('executions', []) - replica_dict['executions'] = [ - view.format_replica_tasks_execution(ex) - for ex in executions] - - return replica_dict - - -def single(replica, keys=None): - return {"replica": _format_replica(replica, keys)} - - -def collection(replicas, keys=None): - formatted_replicas = [_format_replica(m, keys) - for m in replicas] - return {'replicas': formatted_replicas} diff --git a/coriolis/api/v1/views/replica_schedule_view.py b/coriolis/api/v1/views/transfer_schedule_view.py similarity index 100% rename from coriolis/api/v1/views/replica_schedule_view.py rename to coriolis/api/v1/views/transfer_schedule_view.py diff --git a/coriolis/api/v1/views/replica_tasks_execution_view.py b/coriolis/api/v1/views/transfer_tasks_execution_view.py similarity index 82% rename from coriolis/api/v1/views/replica_tasks_execution_view.py rename to coriolis/api/v1/views/transfer_tasks_execution_view.py index 96359a18f..3873d1874 100644 --- a/coriolis/api/v1/views/replica_tasks_execution_view.py +++ b/coriolis/api/v1/views/transfer_tasks_execution_view.py @@ -25,7 +25,7 @@ def _sort_tasks(tasks, filter_error_only_tasks=True): tasks, key=lambda t: t.get('index', 0)) -def format_replica_tasks_execution(execution, keys=None): +def format_transfer_tasks_execution(execution, keys=None): if "tasks" in execution: execution["tasks"] = _sort_tasks(execution["tasks"]) @@ -35,10 +35,10 @@ def format_replica_tasks_execution(execution, keys=None): def single(execution, keys=None): - return {"execution": format_replica_tasks_execution(execution, keys)} + return {"execution": format_transfer_tasks_execution(execution, keys)} def collection(executions, keys=None): - formatted_executions = [format_replica_tasks_execution(m, keys) 
+ formatted_executions = [format_transfer_tasks_execution(m, keys) for m in executions] return {'executions': formatted_executions} diff --git a/coriolis/api/v1/views/transfer_view.py b/coriolis/api/v1/views/transfer_view.py new file mode 100644 index 000000000..2fcefe4f1 --- /dev/null +++ b/coriolis/api/v1/views/transfer_view.py @@ -0,0 +1,25 @@ +# Copyright 2016 Cloudbase Solutions Srl +# All Rights Reserved. + +from coriolis.api.v1.views import transfer_tasks_execution_view as view +from coriolis.api.v1.views import utils as view_utils + + +def _format_transfer(transfer, keys=None): + transfer_dict = view_utils.format_opt(transfer, keys) + + executions = transfer_dict.get('executions', []) + transfer_dict['executions'] = [ + view.format_transfer_tasks_execution(ex) + for ex in executions] + + return transfer_dict + + +def single(transfer, keys=None): + return {"transfer": _format_transfer(transfer, keys)} + + +def collection(transfers, keys=None): + formatted_transfers = [_format_transfer(t, keys) for t in transfers] + return {'transfers': formatted_transfers} diff --git a/coriolis/cmd/replica_cron.py b/coriolis/cmd/transfer_cron.py similarity index 77% rename from coriolis/cmd/replica_cron.py rename to coriolis/cmd/transfer_cron.py index 2484ccde6..3605389c4 100644 --- a/coriolis/cmd/replica_cron.py +++ b/coriolis/cmd/transfer_cron.py @@ -6,8 +6,8 @@ from oslo_config import cfg from coriolis import constants -from coriolis.replica_cron.rpc import server as rpc_server from coriolis import service +from coriolis.transfer_cron.rpc import server as rpc_server from coriolis import utils CONF = cfg.CONF @@ -19,8 +19,8 @@ def main(): utils.setup_logging() server = service.MessagingService( - constants.REPLICA_CRON_MAIN_MESSAGING_TOPIC, - [rpc_server.ReplicaCronServerEndpoint()], + constants.TRANSFER_CRON_MAIN_MESSAGING_TOPIC, + [rpc_server.TransferCronServerEndpoint()], rpc_server.VERSION, worker_count=1) launcher = service.service.launch( CONF, server, 
workers=server.get_workers_count()) diff --git a/coriolis/conductor/rpc/client.py b/coriolis/conductor/rpc/client.py index 3bfd216e5..45148de60 100644 --- a/coriolis/conductor/rpc/client.py +++ b/coriolis/conductor/rpc/client.py @@ -9,7 +9,6 @@ from coriolis import events from coriolis import rpc - VERSION = "1.0" LOG = logging.getLogger(__name__) @@ -129,46 +128,49 @@ def get_provider_schemas(self, ctxt, platform_name, provider_type): platform_name=platform_name, provider_type=provider_type) - def execute_replica_tasks(self, ctxt, replica_id, - shutdown_instances=False): + def execute_transfer_tasks(self, ctxt, transfer_id, + shutdown_instances=False): return self._call( - ctxt, 'execute_replica_tasks', replica_id=replica_id, + ctxt, 'execute_transfer_tasks', transfer_id=transfer_id, shutdown_instances=shutdown_instances) - def get_replica_tasks_executions(self, ctxt, replica_id, - include_tasks=False): + def get_transfer_tasks_executions(self, ctxt, transfer_id, + include_tasks=False): return self._call( - ctxt, 'get_replica_tasks_executions', - replica_id=replica_id, + ctxt, 'get_transfer_tasks_executions', + transfer_id=transfer_id, include_tasks=include_tasks) - def get_replica_tasks_execution(self, ctxt, replica_id, execution_id, - include_task_info=False): + def get_transfer_tasks_execution(self, ctxt, transfer_id, execution_id, + include_task_info=False): return self._call( - ctxt, 'get_replica_tasks_execution', replica_id=replica_id, + ctxt, 'get_transfer_tasks_execution', transfer_id=transfer_id, execution_id=execution_id, include_task_info=include_task_info) - def delete_replica_tasks_execution(self, ctxt, replica_id, execution_id): + def delete_transfer_tasks_execution(self, ctxt, transfer_id, execution_id): return self._call( - ctxt, 'delete_replica_tasks_execution', replica_id=replica_id, + ctxt, 'delete_transfer_tasks_execution', transfer_id=transfer_id, execution_id=execution_id) - def cancel_replica_tasks_execution(self, ctxt, replica_id, 
execution_id, - force): + def cancel_transfer_tasks_execution(self, ctxt, transfer_id, execution_id, + force): return self._call( - ctxt, 'cancel_replica_tasks_execution', replica_id=replica_id, + ctxt, 'cancel_transfer_tasks_execution', transfer_id=transfer_id, execution_id=execution_id, force=force) - def create_instances_replica(self, ctxt, origin_endpoint_id, - destination_endpoint_id, - origin_minion_pool_id, - destination_minion_pool_id, - instance_osmorphing_minion_pool_mappings, - source_environment, destination_environment, - instances, network_map, storage_mappings, - notes=None, user_scripts=None): - return self._call( - ctxt, 'create_instances_replica', + def create_instances_transfer(self, ctxt, + transfer_scenario, + origin_endpoint_id, + destination_endpoint_id, + origin_minion_pool_id, + destination_minion_pool_id, + instance_osmorphing_minion_pool_mappings, + source_environment, destination_environment, + instances, network_map, storage_mappings, + notes=None, user_scripts=None): + return self._call( + ctxt, 'create_instances_transfer', + transfer_scenario=transfer_scenario, origin_endpoint_id=origin_endpoint_id, destination_endpoint_id=destination_endpoint_id, origin_minion_pool_id=origin_minion_pool_id, @@ -183,84 +185,57 @@ def create_instances_replica(self, ctxt, origin_endpoint_id, source_environment=source_environment, user_scripts=user_scripts) - def get_replicas(self, ctxt, include_tasks_executions=False, - include_task_info=False): + def get_transfers(self, ctxt, include_tasks_executions=False, + include_task_info=False): return self._call( - ctxt, 'get_replicas', + ctxt, 'get_transfers', include_tasks_executions=include_tasks_executions, include_task_info=include_task_info) - def get_replica(self, ctxt, replica_id, include_task_info=False): + def get_transfer(self, ctxt, transfer_id, include_task_info=False): return self._call( - ctxt, 'get_replica', replica_id=replica_id, + ctxt, 'get_transfer', transfer_id=transfer_id, 
include_task_info=include_task_info) - def delete_replica(self, ctxt, replica_id): + def delete_transfer(self, ctxt, transfer_id): self._call( - ctxt, 'delete_replica', replica_id=replica_id) + ctxt, 'delete_transfer', transfer_id=transfer_id) - def delete_replica_disks(self, ctxt, replica_id): + def delete_transfer_disks(self, ctxt, transfer_id): return self._call( - ctxt, 'delete_replica_disks', replica_id=replica_id) + ctxt, 'delete_transfer_disks', transfer_id=transfer_id) - def get_migrations(self, ctxt, include_tasks=False, - include_task_info=False): + def get_deployments(self, ctxt, include_tasks=False, + include_task_info=False): return self._call( - ctxt, 'get_migrations', include_tasks=include_tasks, + ctxt, 'get_deployments', include_tasks=include_tasks, include_task_info=include_task_info) - def get_migration(self, ctxt, migration_id, include_task_info=False): + def get_deployment(self, ctxt, deployment_id, include_task_info=False): return self._call( - ctxt, 'get_migration', migration_id=migration_id, + ctxt, 'get_deployment', deployment_id=deployment_id, include_task_info=include_task_info) - def migrate_instances(self, ctxt, origin_endpoint_id, - destination_endpoint_id, origin_minion_pool_id, - destination_minion_pool_id, - instance_osmorphing_minion_pool_mappings, - source_environment, destination_environment, - instances, network_map, storage_mappings, - replication_count, shutdown_instances=False, - notes=None, skip_os_morphing=False, - user_scripts=None): - return self._call( - ctxt, 'migrate_instances', - origin_endpoint_id=origin_endpoint_id, - destination_endpoint_id=destination_endpoint_id, - origin_minion_pool_id=origin_minion_pool_id, - destination_minion_pool_id=destination_minion_pool_id, - instance_osmorphing_minion_pool_mappings=( - instance_osmorphing_minion_pool_mappings), - destination_environment=destination_environment, - instances=instances, - notes=notes, - replication_count=replication_count, - 
shutdown_instances=shutdown_instances, - skip_os_morphing=skip_os_morphing, - network_map=network_map, - storage_mappings=storage_mappings, - source_environment=source_environment, - user_scripts=user_scripts) - - def deploy_replica_instances( - self, ctxt, replica_id, + def deploy_transfer_instances( + self, ctxt, transfer_id, instance_osmorphing_minion_pool_mappings=None, clone_disks=False, force=False, skip_os_morphing=False, user_scripts=None): return self._call( - ctxt, 'deploy_replica_instances', replica_id=replica_id, + ctxt, 'deploy_transfer_instances', transfer_id=transfer_id, instance_osmorphing_minion_pool_mappings=( instance_osmorphing_minion_pool_mappings), clone_disks=clone_disks, force=force, skip_os_morphing=skip_os_morphing, user_scripts=user_scripts) - def delete_migration(self, ctxt, migration_id): + def delete_deployment(self, ctxt, deployment_id): self._call( - ctxt, 'delete_migration', migration_id=migration_id) + ctxt, 'delete_deployment', deployment_id=deployment_id) - def cancel_migration(self, ctxt, migration_id, force): + def cancel_deployment(self, ctxt, deployment_id, force): self._call( - ctxt, 'cancel_migration', migration_id=migration_id, force=force) + ctxt, 'cancel_deployment', deployment_id=deployment_id, + force=force) def set_task_host(self, ctxt, task_id, host): self._call( @@ -308,48 +283,48 @@ def update_task_progress_update( new_current_step=new_current_step, new_total_steps=new_total_steps, new_message=new_message) - def create_replica_schedule(self, ctxt, replica_id, - schedule, enabled, exp_date, - shutdown_instance): + def create_transfer_schedule(self, ctxt, transfer_id, + schedule, enabled, exp_date, + shutdown_instance): return self._call( - ctxt, 'create_replica_schedule', - replica_id=replica_id, + ctxt, 'create_transfer_schedule', + transfer_id=transfer_id, schedule=schedule, enabled=enabled, exp_date=exp_date, shutdown_instance=shutdown_instance) - def update_replica_schedule(self, ctxt, replica_id, schedule_id, - 
updated_values): + def update_transfer_schedule(self, ctxt, transfer_id, schedule_id, + updated_values): return self._call( - ctxt, 'update_replica_schedule', - replica_id=replica_id, + ctxt, 'update_transfer_schedule', + transfer_id=transfer_id, schedule_id=schedule_id, updated_values=updated_values) - def delete_replica_schedule(self, ctxt, replica_id, schedule_id): + def delete_transfer_schedule(self, ctxt, transfer_id, schedule_id): return self._call( - ctxt, 'delete_replica_schedule', - replica_id=replica_id, + ctxt, 'delete_transfer_schedule', + transfer_id=transfer_id, schedule_id=schedule_id) - def get_replica_schedules(self, ctxt, replica_id=None, expired=True): + def get_transfer_schedules(self, ctxt, transfer_id=None, expired=True): return self._call( - ctxt, 'get_replica_schedules', - replica_id=replica_id, expired=expired) + ctxt, 'get_transfer_schedules', + transfer_id=transfer_id, expired=expired) - def get_replica_schedule(self, ctxt, replica_id, - schedule_id, expired=True): + def get_transfer_schedule(self, ctxt, transfer_id, + schedule_id, expired=True): return self._call( - ctxt, 'get_replica_schedule', - replica_id=replica_id, + ctxt, 'get_transfer_schedule', + transfer_id=transfer_id, schedule_id=schedule_id, expired=expired) - def update_replica(self, ctxt, replica_id, updated_properties): + def update_transfer(self, ctxt, transfer_id, updated_properties): return self._call( - ctxt, 'update_replica', - replica_id=replica_id, + ctxt, 'update_transfer', + transfer_id=transfer_id, updated_properties=updated_properties) def get_diagnostics(self, ctxt): @@ -416,31 +391,32 @@ def delete_service(self, ctxt, service_id): return self._call( ctxt, 'delete_service', service_id=service_id) - def confirm_replica_minions_allocation( - self, ctxt, replica_id, minion_machine_allocations): + def confirm_transfer_minions_allocation( + self, ctxt, transfer_id, minion_machine_allocations): self._call( - ctxt, 'confirm_replica_minions_allocation', 
replica_id=replica_id, + ctxt, 'confirm_transfer_minions_allocation', + transfer_id=transfer_id, minion_machine_allocations=minion_machine_allocations) - def report_replica_minions_allocation_error( - self, ctxt, replica_id, minion_allocation_error_details): + def report_transfer_minions_allocation_error( + self, ctxt, transfer_id, minion_allocation_error_details): self._call( - ctxt, 'report_replica_minions_allocation_error', - replica_id=replica_id, + ctxt, 'report_transfer_minions_allocation_error', + transfer_id=transfer_id, minion_allocation_error_details=minion_allocation_error_details) - def confirm_migration_minions_allocation( - self, ctxt, migration_id, minion_machine_allocations): + def confirm_deployment_minions_allocation( + self, ctxt, deployment_id, minion_machine_allocations): self._call( - ctxt, 'confirm_migration_minions_allocation', - migration_id=migration_id, + ctxt, 'confirm_deployment_minions_allocation', + deployment_id=deployment_id, minion_machine_allocations=minion_machine_allocations) - def report_migration_minions_allocation_error( - self, ctxt, migration_id, minion_allocation_error_details): + def report_deployment_minions_allocation_error( + self, ctxt, deployment_id, minion_allocation_error_details): self._call( - ctxt, 'report_migration_minions_allocation_error', - migration_id=migration_id, + ctxt, 'report_deployment_minions_allocation_error', + deployment_id=deployment_id, minion_allocation_error_details=minion_allocation_error_details) @@ -460,7 +436,7 @@ def _rpc_conductor_client(self): return self._rpc_conductor_client_instance @classmethod - def get_progress_update_identifier(self, progress_update): + def get_progress_update_identifier(cls, progress_update): return progress_update['index'] def add_progress_update( diff --git a/coriolis/conductor/rpc/server.py b/coriolis/conductor/rpc/server.py index ecdb6d64b..46787b4d5 100644 --- a/coriolis/conductor/rpc/server.py +++ b/coriolis/conductor/rpc/server.py @@ -18,10 +18,10 @@ 
from coriolis import keystone from coriolis.licensing import client as licensing_client from coriolis.minion_manager.rpc import client as rpc_minion_manager_client -from coriolis.replica_cron.rpc import client as rpc_cron_client from coriolis.scheduler.rpc import client as rpc_scheduler_client from coriolis import schemas from coriolis.tasks import factory as tasks_factory +from coriolis.transfer_cron.rpc import client as rpc_cron_client from coriolis import utils from coriolis.worker.rpc import client as rpc_worker_client @@ -45,6 +45,13 @@ "A fatal deadlock has occurred. Further debugging is required. " "Please review the Conductor logs and contact support for assistance.") +SCENARIO_TYPE_TO_LICENSING_RESERVATION_MAP = { + constants.TRANSFER_SCENARIO_REPLICA: + licensing_client.RESERVATION_TYPE_REPLICA, + constants.TRANSFER_SCENARIO_LIVE_MIGRATION: + licensing_client.RESERVATION_TYPE_MIGRATION +} + def endpoint_synchronized(func): @functools.wraps(func) @@ -58,26 +65,26 @@ def inner(): return wrapper -def replica_synchronized(func): +def transfer_synchronized(func): @functools.wraps(func) - def wrapper(self, ctxt, replica_id, *args, **kwargs): + def wrapper(self, ctxt, transfer_id, *args, **kwargs): @lockutils.synchronized( - constants.REPLICA_LOCK_NAME_FORMAT % replica_id, + constants.TRANSFER_LOCK_NAME_FORMAT % transfer_id, external=True) def inner(): - return func(self, ctxt, replica_id, *args, **kwargs) + return func(self, ctxt, transfer_id, *args, **kwargs) return inner() return wrapper def schedule_synchronized(func): @functools.wraps(func) - def wrapper(self, ctxt, replica_id, schedule_id, *args, **kwargs): + def wrapper(self, ctxt, transfer_id, schedule_id, *args, **kwargs): @lockutils.synchronized( constants.SCHEDULE_LOCK_NAME_FORMAT % schedule_id, external=True) def inner(): - return func(self, ctxt, replica_id, schedule_id, *args, **kwargs) + return func(self, ctxt, transfer_id, schedule_id, *args, **kwargs) return inner() return wrapper @@ -111,26 
+118,26 @@ def inner(): return wrapper -def migration_synchronized(func): +def deployment_synchronized(func): @functools.wraps(func) - def wrapper(self, ctxt, migration_id, *args, **kwargs): + def wrapper(self, ctxt, deployment_id, *args, **kwargs): @lockutils.synchronized( - constants.MIGRATION_LOCK_NAME_FORMAT % migration_id, + constants.DEPLOYMENT_LOCK_NAME_FORMAT % deployment_id, external=True) def inner(): - return func(self, ctxt, migration_id, *args, **kwargs) + return func(self, ctxt, deployment_id, *args, **kwargs) return inner() return wrapper def tasks_execution_synchronized(func): @functools.wraps(func) - def wrapper(self, ctxt, replica_id, execution_id, *args, **kwargs): + def wrapper(self, ctxt, transfer_id, execution_id, *args, **kwargs): @lockutils.synchronized( constants.EXECUTION_LOCK_NAME_FORMAT % execution_id, external=True) def inner(): - return func(self, ctxt, replica_id, execution_id, *args, **kwargs) + return func(self, ctxt, transfer_id, execution_id, *args, **kwargs) return inner() return wrapper @@ -164,7 +171,7 @@ def __init__(self): self._licensing_client = licensing_client.LicensingClient.from_env() self._worker_client_instance = None self._scheduler_client_instance = None - self._replica_cron_client_instance = None + self._transfer_cron_client_instance = None self._minion_manager_client_instance = None # NOTE(aznashwan): it is unsafe to fork processes with pre-instantiated @@ -187,11 +194,11 @@ def _scheduler_client(self): return self._scheduler_client_instance @property - def _replica_cron_client(self): - if not self._replica_cron_client_instance: - self._replica_cron_client_instance = ( - rpc_cron_client.ReplicaCronClient()) - return self._replica_cron_client_instance + def _transfer_cron_client(self): + if not self._transfer_cron_client_instance: + self._transfer_cron_client_instance = ( + rpc_cron_client.TransferCronClient()) + return self._transfer_cron_client_instance @property def _minion_manager_client(self): @@ -203,7 +210,7 
@@ def _minion_manager_client(self): def get_all_diagnostics(self, ctxt): client_objects = { "conductor": self, - "replica_cron": self._replica_cron_client, + "transfer_cron": self._transfer_cron_client, "minion_manager": self._minion_manager_client, "scheduler": self._scheduler_client} @@ -278,40 +285,117 @@ def _check_delete_reservation_for_transfer(self, transfer_action): "action with ID '%s'. Skipping. Exception\n%s", reservation_id, action_id, utils.get_exception_details()) - def _check_create_reservation_for_transfer( - self, transfer_action, transfer_type): - action_id = transfer_action.base_id + def _create_reservation_for_transfer(self, transfer): + action_id = transfer.base_id + scenario = transfer.scenario + reservation_type = SCENARIO_TYPE_TO_LICENSING_RESERVATION_MAP.get( + scenario, None) + if not reservation_type: + raise exception.LicensingException( + message="Could not determine reservation type for transfer " + f"'{action_id}' with scenario '{transfer.scenario}'.") if not self._licensing_client: LOG.warn( "Licensing client not instantiated. 
Skipping creation of " "reservation for transfer action '%s'", action_id) return - ninstances = len(transfer_action.instances) + ninstances = len(transfer.instances) LOG.debug( "Attempting to create '%s' reservation for %d instances for " "transfer action with ID '%s'.", - transfer_type, ninstances, action_id) + reservation_type, ninstances, action_id) reservation = self._licensing_client.add_reservation( - transfer_type, ninstances) - transfer_action.reservation_id = reservation['id'] + reservation_type, ninstances) + + LOG.info( + f"Successfully created licensing reservation for transfer " + f"with ID '{action_id}' with properties: {reservation}") + transfer.reservation_id = reservation['id'] + + return reservation - def _check_reservation_for_transfer( - self, transfer_action, reservation_type): + def _get_licensing_reservation_for_action(self, transfer_action): action_id = transfer_action.base_id + if not self._licensing_client: + LOG.warn( + f"Licensing client not instantiated. Skipping getting " + f"reservation for transfer action '{action_id}'") + return None + + reservation_id = transfer_action.reservation_id + if not reservation_id: + LOG.warn( + f"No reservation_id set on transfer action '{action_id}'") + return None + + return self._licensing_client.get_reservation(reservation_id) + + def _check_mark_reservation_fulfilled( + self, transfer_action, must_unfulfilled=False): + action_id = transfer_action.id + reservation = self._get_licensing_reservation_for_action( + transfer_action) + if not reservation: + LOG.info( + f"No licensing reservation found for transfer action " + f"'{action_id}'. Skipping marking fulfilled.") + return + + reservation_id = reservation['id'] + fulfilled = reservation.get("fulfilled_at", None) + if fulfilled: + if must_unfulfilled: + raise exception.Conflict( + f"A licensing reservation with ID {reservation_id} " + "already exists and has been marked as fulfilled " + "within the licensing server. 
Please create a new " + "transfer operation in order to obtain a new " + "reservation.") + LOG.debug( + f"Reservation with ID '{reservation_id}' for transfer " + f"action '{action_id}' was already marked as " + f"fulfilled") + else: + self._licensing_client.mark_reservation_fulfilled(reservation_id) + LOG.debug( + f"Successfully marked reservation with ID '{reservation_id}' " + f"for transfer action '{action_id}' as fulfilled") + + def _check_reservation_for_transfer(self, transfer): + scenario = transfer.scenario + reservation_type = SCENARIO_TYPE_TO_LICENSING_RESERVATION_MAP.get( + scenario, None) + if not reservation_type: + raise exception.LicensingException( + message="Could not determine reservation type for transfer " + f"'{transfer.id}' with scenario " + f"'{transfer.scenario}'.") + + action_id = transfer.base_id if not self._licensing_client: LOG.warn( "Licensing client not instantiated. Skipping checking of " "reservation for transfer action '%s'", action_id) return - reservation_id = transfer_action.reservation_id + reservation_id = transfer.reservation_id if reservation_id: LOG.debug( "Attempting to check reservation with ID '%s' for transfer " "action '%s'", reservation_id, action_id) try: - transfer_action.reservation_id = ( + reservation = self._licensing_client.get_reservation( + reservation_id) + + fulfilled_at = reservation.get("fulfilled_at", None) + if scenario == constants.TRANSFER_SCENARIO_LIVE_MIGRATION and ( + fulfilled_at): + raise exception.MigrationLicenceFulfilledException( + action_id=transfer.id, reservation_id=reservation_id, + fulfilled_at=fulfilled_at) + + transfer.reservation_id = ( + self._licensing_client.check_refresh_reservation( + reservation_id)['id']) except Exception as ex: @@ -319,7 +403,7 @@ def _check_reservation_for_transfer( if exc_code in [404, 409]: if exc_code == 409: LOG.debug( - "Server-side exception occurred while trying to " + "Licensing-side conflict occurred while trying to " "check the existing 
reservation '%s' for action " "'%s'. Attempting to create a new reservation. " "Trace was: %s", @@ -332,15 +416,14 @@ def _check_reservation_for_transfer( "reservation. Trace was: %s", reservation_id, action_id, utils.get_exception_details()) - self._check_create_reservation_for_transfer( - transfer_action, reservation_type) + self._create_reservation_for_transfer(transfer) else: raise ex else: - LOG.debug( - "Transfer action '%s' has no reservation ID set.", action_id) - self._check_create_reservation_for_transfer( - transfer_action, reservation_type) + LOG.info( + f"Transfer action '{action_id}' has no reservation ID set, " + f"attempting to create a new one for it") + self._create_reservation_for_transfer(transfer) def create_endpoint(self, ctxt, name, endpoint_type, description, connection_info, mapped_regions=None): @@ -393,11 +476,11 @@ def get_endpoint(self, ctxt, endpoint_id): @endpoint_synchronized def delete_endpoint(self, ctxt, endpoint_id): - q_replicas_count = db_api.get_endpoint_replicas_count( + q_transfers_count = db_api.get_endpoint_transfers_count( ctxt, endpoint_id) - if q_replicas_count is not 0: - raise exception.NotAuthorized("%s replicas would be orphaned!" % - q_replicas_count) + if q_transfers_count != 0: + raise exception.NotAuthorized("%s transfers would be orphaned!" 
% + q_transfers_count) db_api.delete_endpoint(ctxt, endpoint_id) def get_endpoint_instances(self, ctxt, endpoint_id, source_environment, @@ -810,40 +893,40 @@ def _check_task_cls_param_requirements(task, instance_task_info_keys): "for ordering or state conflicts.", execution.id, execution.type) - @replica_synchronized - def execute_replica_tasks(self, ctxt, replica_id, shutdown_instances): - replica = self._get_replica(ctxt, replica_id, include_task_info=True) - self._check_reservation_for_transfer( - replica, licensing_client.RESERVATION_TYPE_REPLICA) - self._check_replica_running_executions(ctxt, replica) - self._check_minion_pools_for_action(ctxt, replica) + @transfer_synchronized + def execute_transfer_tasks(self, ctxt, transfer_id, shutdown_instances): + transfer = self._get_transfer( + ctxt, transfer_id, include_task_info=True) + self._check_transfer_running_executions(ctxt, transfer) + self._check_minion_pools_for_action(ctxt, transfer) + self._check_reservation_for_transfer(transfer) execution = models.TasksExecution() execution.id = str(uuid.uuid4()) - execution.action = replica + execution.action = transfer execution.status = constants.EXECUTION_STATUS_UNEXECUTED - execution.type = constants.EXECUTION_TYPE_REPLICA_EXECUTION + execution.type = constants.EXECUTION_TYPE_TRANSFER_EXECUTION # TODO(aznashwan): have these passed separately to the relevant # provider methods. They're currently passed directly inside # dest-env by the API service when accepting the call, but we - # re-overwrite them here in case of Replica updates. - dest_env = copy.deepcopy(replica.destination_environment) - dest_env['network_map'] = replica.network_map - dest_env['storage_mappings'] = replica.storage_mappings + # re-overwrite them here in case of Transfer updates. 
+ dest_env = copy.deepcopy(transfer.destination_environment) + dest_env['network_map'] = transfer.network_map + dest_env['storage_mappings'] = transfer.storage_mappings for instance in execution.action.instances: # NOTE: we default/convert the volumes info to an empty list # to preserve backwards-compatibility with older versions # of Coriolis dating before the scheduling overhaul (PR##114) - if instance not in replica.info: - replica.info[instance] = {'volumes_info': []} - elif replica.info[instance].get('volumes_info') is None: - replica.info[instance]['volumes_info'] = [] + if instance not in transfer.info: + transfer.info[instance] = {'volumes_info': []} + elif transfer.info[instance].get('volumes_info') is None: + transfer.info[instance]['volumes_info'] = [] # NOTE: we update all of the param values before triggering an # execution to ensure that the latest parameters are used: - replica.info[instance].update({ - "source_environment": replica.source_environment, + transfer.info[instance].update({ + "source_environment": transfer.source_environment, "target_environment": dest_env}) # TODO(aznashwan): have these passed separately to the relevant # provider methods (they're currently passed directly inside @@ -851,9 +934,9 @@ def execute_replica_tasks(self, ctxt, replica_id, shutdown_instances): # "network_map": network_map, # "storage_mappings": storage_mappings, - validate_replica_source_inputs_task = self._create_task( + validate_transfer_source_inputs_task = self._create_task( instance, - constants.TASK_TYPE_VALIDATE_REPLICA_SOURCE_INPUTS, + constants.TASK_TYPE_VALIDATE_TRANSFER_SOURCE_INPUTS, execution) get_instance_info_task = self._create_task( @@ -861,20 +944,20 @@ def execute_replica_tasks(self, ctxt, replica_id, shutdown_instances): constants.TASK_TYPE_GET_INSTANCE_INFO, execution) - validate_replica_destination_inputs_task = self._create_task( + validate_transfer_destination_inputs_task = self._create_task( instance, - 
constants.TASK_TYPE_VALIDATE_REPLICA_DESTINATION_INPUTS, + constants.TASK_TYPE_VALIDATE_TRANSFER_DESTINATION_INPUTS, execution, depends_on=[get_instance_info_task.id]) disk_deployment_depends_on = [] validate_origin_minion_task = None - if replica.origin_minion_pool_id: + if transfer.origin_minion_pool_id: # NOTE: these values are required for the # _check_execution_tasks_sanity call but # will be populated later when the pool # allocations actually happen: - replica.info[instance].update({ + transfer.info[instance].update({ "origin_minion_machine_id": None, "origin_minion_provider_properties": None, "origin_minion_connection_info": None}) @@ -884,20 +967,20 @@ def execute_replica_tasks(self, ctxt, replica_id, shutdown_instances): execution, depends_on=[ get_instance_info_task.id, - validate_replica_source_inputs_task.id]) + validate_transfer_source_inputs_task.id]) disk_deployment_depends_on.append( validate_origin_minion_task.id) else: disk_deployment_depends_on.append( - validate_replica_source_inputs_task.id) + validate_transfer_source_inputs_task.id) validate_destination_minion_task = None - if replica.destination_minion_pool_id: + if transfer.destination_minion_pool_id: # NOTE: these values are required for the # _check_execution_tasks_sanity call but # will be populated later when the pool # allocations actually happen: - replica.info[instance].update({ + transfer.info[instance].update({ "destination_minion_machine_id": None, "destination_minion_provider_properties": None, "destination_minion_connection_info": None, @@ -907,42 +990,42 @@ def execute_replica_tasks(self, ctxt, replica_id, shutdown_instances): constants.TASK_TYPE_VALIDATE_DESTINATION_MINION_POOL_COMPATIBILITY, # noqa: E501 execution, depends_on=[ - validate_replica_destination_inputs_task.id]) + validate_transfer_destination_inputs_task.id]) disk_deployment_depends_on.append( validate_destination_minion_task.id) else: disk_deployment_depends_on.append( - 
validate_replica_destination_inputs_task.id) + validate_transfer_destination_inputs_task.id) - deploy_replica_disks_task = self._create_task( - instance, constants.TASK_TYPE_DEPLOY_REPLICA_DISKS, + deploy_transfer_disks_task = self._create_task( + instance, constants.TASK_TYPE_DEPLOY_TRANSFER_DISKS, execution, depends_on=disk_deployment_depends_on) shutdown_deps = [] - deploy_replica_source_resources_task = None - if not replica.origin_minion_pool_id: - deploy_replica_source_resources_task = self._create_task( + deploy_transfer_source_resources_task = None + if not transfer.origin_minion_pool_id: + deploy_transfer_source_resources_task = self._create_task( instance, - constants.TASK_TYPE_DEPLOY_REPLICA_SOURCE_RESOURCES, + constants.TASK_TYPE_DEPLOY_TRANSFER_SOURCE_RESOURCES, execution, depends_on=[ - deploy_replica_disks_task.id]) - shutdown_deps.append(deploy_replica_source_resources_task) + deploy_transfer_disks_task.id]) + shutdown_deps.append(deploy_transfer_source_resources_task) attach_destination_minion_disks_task = None - deploy_replica_target_resources_task = None - if replica.destination_minion_pool_id: + deploy_transfer_target_resources_task = None + if transfer.destination_minion_pool_id: ttyp = constants.TASK_TYPE_ATTACH_VOLUMES_TO_DESTINATION_MINION attach_destination_minion_disks_task = self._create_task( instance, ttyp, execution, depends_on=[ - deploy_replica_disks_task.id]) + deploy_transfer_disks_task.id]) shutdown_deps.append(attach_destination_minion_disks_task) else: - deploy_replica_target_resources_task = self._create_task( + deploy_transfer_target_resources_task = self._create_task( instance, - constants.TASK_TYPE_DEPLOY_REPLICA_TARGET_RESOURCES, + constants.TASK_TYPE_DEPLOY_TRANSFER_TARGET_RESOURCES, execution, depends_on=[ - deploy_replica_disks_task.id]) - shutdown_deps.append(deploy_replica_target_resources_task) + deploy_transfer_disks_task.id]) + shutdown_deps.append(deploy_transfer_target_resources_task) depends_on = [t.id for t in 
shutdown_deps] if shutdown_instances: @@ -955,7 +1038,7 @@ def execute_replica_tasks(self, ctxt, replica_id, shutdown_instances): instance, constants.TASK_TYPE_REPLICATE_DISKS, execution, depends_on=depends_on) - if replica.origin_minion_pool_id: + if transfer.origin_minion_pool_id: self._create_task( instance, constants.TASK_TYPE_RELEASE_SOURCE_MINION, @@ -967,14 +1050,14 @@ def execute_replica_tasks(self, ctxt, replica_id, shutdown_instances): else: self._create_task( instance, - constants.TASK_TYPE_DELETE_REPLICA_SOURCE_RESOURCES, + constants.TASK_TYPE_DELETE_TRANSFER_SOURCE_RESOURCES, execution, depends_on=[ - deploy_replica_source_resources_task.id, + deploy_transfer_source_resources_task.id, replicate_disks_task.id], on_error=True) - if replica.destination_minion_pool_id: + if transfer.destination_minion_pool_id: detach_volumes_from_minion_task = self._create_task( instance, constants.TASK_TYPE_DETACH_VOLUMES_FROM_DESTINATION_MINION, @@ -995,162 +1078,165 @@ def execute_replica_tasks(self, ctxt, replica_id, shutdown_instances): else: self._create_task( instance, - constants.TASK_TYPE_DELETE_REPLICA_TARGET_RESOURCES, + constants.TASK_TYPE_DELETE_TRANSFER_TARGET_RESOURCES, execution, depends_on=[ - deploy_replica_target_resources_task.id, + deploy_transfer_target_resources_task.id, replicate_disks_task.id], on_error=True) - self._check_execution_tasks_sanity(execution, replica.info) + self._check_execution_tasks_sanity(execution, transfer.info) - # update the action info for all of the Replicas: + # update the action info for all of the Transfers: for instance in execution.action.instances: db_api.update_transfer_action_info_for_instance( - ctxt, replica.id, instance, replica.info[instance]) + ctxt, transfer.id, instance, transfer.info[instance]) # add new execution to DB: - db_api.add_replica_tasks_execution(ctxt, execution) - LOG.info("Replica tasks execution added to DB: %s", execution.id) + db_api.add_transfer_tasks_execution(ctxt, execution) + 
LOG.info("Transfer tasks execution added to DB: %s", execution.id) uses_minion_pools = any([ - replica.origin_minion_pool_id, - replica.destination_minion_pool_id]) + transfer.origin_minion_pool_id, + transfer.destination_minion_pool_id]) if uses_minion_pools: - self._minion_manager_client.allocate_minion_machines_for_replica( - ctxt, replica) + self._minion_manager_client.allocate_minion_machines_for_transfer( + ctxt, transfer) self._set_tasks_execution_status( ctxt, execution, constants.EXECUTION_STATUS_AWAITING_MINION_ALLOCATIONS) else: - self._begin_tasks(ctxt, replica, execution) + self._begin_tasks(ctxt, transfer, execution) - return self.get_replica_tasks_execution( - ctxt, replica_id, execution.id) + return self.get_transfer_tasks_execution( + ctxt, transfer_id, execution.id) - @replica_synchronized - def get_replica_tasks_executions(self, ctxt, replica_id, - include_tasks=False, - include_task_info=False): - return db_api.get_replica_tasks_executions( - ctxt, replica_id, include_tasks, + @transfer_synchronized + def get_transfer_tasks_executions(self, ctxt, transfer_id, + include_tasks=False, + include_task_info=False): + return db_api.get_transfer_tasks_executions( + ctxt, transfer_id, include_tasks, include_task_info=include_task_info, to_dict=True) @tasks_execution_synchronized - def get_replica_tasks_execution(self, ctxt, replica_id, execution_id, - include_task_info=False): - return self._get_replica_tasks_execution( - ctxt, replica_id, execution_id, + def get_transfer_tasks_execution(self, ctxt, transfer_id, execution_id, + include_task_info=False): + return self._get_transfer_tasks_execution( + ctxt, transfer_id, execution_id, include_task_info=include_task_info, to_dict=True) @tasks_execution_synchronized - def delete_replica_tasks_execution(self, ctxt, replica_id, execution_id): - execution = self._get_replica_tasks_execution( - ctxt, replica_id, execution_id) + def delete_transfer_tasks_execution(self, ctxt, transfer_id, execution_id): + 
execution = self._get_transfer_tasks_execution( + ctxt, transfer_id, execution_id) if execution.status in constants.ACTIVE_EXECUTION_STATUSES: - raise exception.InvalidMigrationState( - "Cannot delete execution '%s' for Replica '%s' as it is " + raise exception.InvalidActionTasksExecutionState( + "Cannot delete execution '%s' for Transfer '%s' as it is " "currently in '%s' state." % ( - execution_id, replica_id, execution.status)) - db_api.delete_replica_tasks_execution(ctxt, execution_id) + execution_id, transfer_id, execution.status)) + db_api.delete_transfer_tasks_execution(ctxt, execution_id) @tasks_execution_synchronized - def cancel_replica_tasks_execution(self, ctxt, replica_id, execution_id, - force): - execution = self._get_replica_tasks_execution( - ctxt, replica_id, execution_id) + def cancel_transfer_tasks_execution(self, ctxt, transfer_id, execution_id, + force): + execution = self._get_transfer_tasks_execution( + ctxt, transfer_id, execution_id) if execution.status not in constants.ACTIVE_EXECUTION_STATUSES: - raise exception.InvalidReplicaState( - "Replica '%s' has no running execution to cancel." % ( - replica_id)) + raise exception.InvalidTransferState( + "Transfer '%s' has no running execution to cancel." % ( + transfer_id)) if execution.status == constants.EXECUTION_STATUS_CANCELLING and ( not force): - raise exception.InvalidReplicaState( - "Replica '%s' is already being cancelled. Please use the " + raise exception.InvalidTransferState( + "Transfer '%s' is already being cancelled. Please use the " "force option if you'd like to force-cancel it." 
% ( - replica_id)) + transfer_id)) self._cancel_tasks_execution(ctxt, execution, force=force) - def _get_replica_tasks_execution(self, ctxt, replica_id, execution_id, - include_task_info=False, to_dict=False): - execution = db_api.get_replica_tasks_execution( - ctxt, replica_id, execution_id, + @staticmethod + def _get_transfer_tasks_execution(ctxt, transfer_id, execution_id, + include_task_info=False, to_dict=False): + execution = db_api.get_transfer_tasks_execution( + ctxt, transfer_id, execution_id, include_task_info=include_task_info, to_dict=to_dict) if not execution: raise exception.NotFound( - "Execution with ID '%s' for Replica '%s' not found." % ( - execution_id, replica_id)) + "Execution with ID '%s' for Transfer '%s' not found." % ( + execution_id, transfer_id)) return execution - def get_replicas(self, ctxt, include_tasks_executions=False, - include_task_info=False): - return db_api.get_replicas( + @staticmethod + def get_transfers(ctxt, include_tasks_executions=False, + include_task_info=False): + return db_api.get_transfers( ctxt, include_tasks_executions, include_task_info=include_task_info, to_dict=True) - @replica_synchronized - def get_replica(self, ctxt, replica_id, include_task_info=False): - return self._get_replica( - ctxt, replica_id, + @transfer_synchronized + def get_transfer(self, ctxt, transfer_id, include_task_info=False): + return self._get_transfer( + ctxt, transfer_id, include_task_info=include_task_info, to_dict=True) - @replica_synchronized - def delete_replica(self, ctxt, replica_id): - replica = self._get_replica(ctxt, replica_id) - self._check_replica_running_executions(ctxt, replica) - self._check_delete_reservation_for_transfer(replica) - db_api.delete_replica(ctxt, replica_id) + @transfer_synchronized + def delete_transfer(self, ctxt, transfer_id): + transfer = self._get_transfer(ctxt, transfer_id) + self._check_transfer_running_executions(ctxt, transfer) + self._check_delete_reservation_for_transfer(transfer) + 
db_api.delete_transfer(ctxt, transfer_id) - @replica_synchronized - def delete_replica_disks(self, ctxt, replica_id): - replica = self._get_replica(ctxt, replica_id, include_task_info=True) - self._check_replica_running_executions(ctxt, replica) + @transfer_synchronized + def delete_transfer_disks(self, ctxt, transfer_id): + transfer = self._get_transfer( + ctxt, transfer_id, include_task_info=True) + self._check_transfer_running_executions(ctxt, transfer) execution = models.TasksExecution() execution.id = str(uuid.uuid4()) execution.status = constants.EXECUTION_STATUS_UNEXECUTED - execution.action = replica - execution.type = constants.EXECUTION_TYPE_REPLICA_DISKS_DELETE + execution.action = transfer + execution.type = constants.EXECUTION_TYPE_TRANSFER_DISKS_DELETE has_tasks = False - for instance in replica.instances: - if (instance in replica.info and ( - replica.info[instance].get('volumes_info'))): + for instance in transfer.instances: + if (instance in transfer.info and ( + transfer.info[instance].get('volumes_info'))): source_del_task = self._create_task( instance, - constants.TASK_TYPE_DELETE_REPLICA_SOURCE_DISK_SNAPSHOTS, + constants.TASK_TYPE_DELETE_TRANSFER_SOURCE_DISK_SNAPSHOTS, execution) self._create_task( - instance, constants.TASK_TYPE_DELETE_REPLICA_DISKS, + instance, constants.TASK_TYPE_DELETE_TRANSFER_DISKS, execution, depends_on=[source_del_task.id]) has_tasks = True if not has_tasks: - raise exception.InvalidReplicaState( - "Replica '%s' does not have volumes information for any " - "instances. Ensure that the replica has been executed " - "successfully priorly" % replica_id) + raise exception.InvalidTransferState( + "Transfer '%s' does not have volumes information for any " + "instances. 
Ensure that the transfer has been executed " + "successfully priorly" % transfer_id) # ensure we're passing the updated target-env options on the - # parent Replica itself in case of a Replica update: - dest_env = copy.deepcopy(replica.destination_environment) - dest_env['network_map'] = replica.network_map - dest_env['storage_mappings'] = replica.storage_mappings - for instance in replica.instances: - replica.info[instance].update({ + # parent Transfer itself in case of a Transfer update: + dest_env = copy.deepcopy(transfer.destination_environment) + dest_env['network_map'] = transfer.network_map + dest_env['storage_mappings'] = transfer.storage_mappings + for instance in transfer.instances: + transfer.info[instance].update({ "target_environment": dest_env}) - self._check_execution_tasks_sanity(execution, replica.info) + self._check_execution_tasks_sanity(execution, transfer.info) - # update the action info for all of the Replicas' instances: - for instance in replica.instances: + # update the action info for all of the Transfers' instances: + for instance in transfer.instances: db_api.update_transfer_action_info_for_instance( - ctxt, replica.id, instance, replica.info[instance]) - db_api.add_replica_tasks_execution(ctxt, execution) - LOG.info("Replica tasks execution created: %s", execution.id) + ctxt, transfer.id, instance, transfer.info[instance]) + db_api.add_transfer_tasks_execution(ctxt, execution) + LOG.info("Transfer tasks execution created: %s", execution.id) - self._begin_tasks(ctxt, replica, execution) - return self.get_replica_tasks_execution( - ctxt, replica_id, execution.id) + self._begin_tasks(ctxt, transfer, execution) + return self.get_transfer_tasks_execution( + ctxt, transfer_id, execution.id) @staticmethod def _check_endpoints(ctxt, origin_endpoint, destination_endpoint): @@ -1165,80 +1251,89 @@ def _check_endpoints(ctxt, origin_endpoint, destination_endpoint): destination_endpoint.connection_info)): raise exception.SameDestination() - def 
create_instances_replica(self, ctxt, origin_endpoint_id, - destination_endpoint_id, - origin_minion_pool_id, - destination_minion_pool_id, - instance_osmorphing_minion_pool_mappings, - source_environment, - destination_environment, instances, - network_map, storage_mappings, notes=None, - user_scripts=None): + def create_instances_transfer(self, ctxt, transfer_scenario, + origin_endpoint_id, + destination_endpoint_id, + origin_minion_pool_id, + destination_minion_pool_id, + instance_osmorphing_minion_pool_mappings, + source_environment, + destination_environment, instances, + network_map, storage_mappings, notes=None, + user_scripts=None): + supported_scenarios = [ + constants.TRANSFER_SCENARIO_REPLICA, + constants.TRANSFER_SCENARIO_LIVE_MIGRATION] + if transfer_scenario not in supported_scenarios: + raise exception.InvalidInput( + message=f"Unsupported Transfer scenario '{transfer_scenario}'." + f" Must be one of: {supported_scenarios}") + origin_endpoint = self.get_endpoint(ctxt, origin_endpoint_id) destination_endpoint = self.get_endpoint( ctxt, destination_endpoint_id) self._check_endpoints(ctxt, origin_endpoint, destination_endpoint) - replica = models.Replica() - replica.id = str(uuid.uuid4()) - replica.base_id = replica.id - replica.origin_endpoint_id = origin_endpoint_id - replica.origin_minion_pool_id = origin_minion_pool_id - replica.destination_endpoint_id = destination_endpoint_id - replica.destination_minion_pool_id = destination_minion_pool_id - replica.destination_environment = destination_environment - replica.source_environment = source_environment - replica.last_execution_status = constants.EXECUTION_STATUS_UNEXECUTED - replica.instances = instances - replica.executions = [] - replica.info = {instance: { + transfer = models.Transfer() + transfer.id = str(uuid.uuid4()) + transfer.base_id = transfer.id + transfer.scenario = transfer_scenario + transfer.origin_endpoint_id = origin_endpoint_id + transfer.origin_minion_pool_id = origin_minion_pool_id + 
transfer.destination_endpoint_id = destination_endpoint_id + transfer.destination_minion_pool_id = destination_minion_pool_id + transfer.destination_environment = destination_environment + transfer.source_environment = source_environment + transfer.last_execution_status = constants.EXECUTION_STATUS_UNEXECUTED + transfer.instances = instances + transfer.executions = [] + transfer.info = {instance: { 'volumes_info': []} for instance in instances} - replica.notes = notes - replica.network_map = network_map - replica.storage_mappings = storage_mappings - replica.instance_osmorphing_minion_pool_mappings = ( + transfer.notes = notes + transfer.network_map = network_map + transfer.storage_mappings = storage_mappings + transfer.instance_osmorphing_minion_pool_mappings = ( instance_osmorphing_minion_pool_mappings) - replica.user_scripts = user_scripts or {} + transfer.user_scripts = user_scripts or {} - self._check_minion_pools_for_action(ctxt, replica) + self._check_minion_pools_for_action(ctxt, transfer) - self._check_create_reservation_for_transfer( - replica, licensing_client.RESERVATION_TYPE_REPLICA) + self._create_reservation_for_transfer(transfer) - db_api.add_replica(ctxt, replica) - LOG.info("Replica created: %s", replica.id) - return self.get_replica(ctxt, replica.id) + db_api.add_transfer(ctxt, transfer) + LOG.info("Transfer created: %s", transfer.id) + return self.get_transfer(ctxt, transfer.id) - def _get_replica(self, ctxt, replica_id, include_task_info=False, - to_dict=False): - replica = db_api.get_replica( - ctxt, replica_id, include_task_info=include_task_info, + def _get_transfer(self, ctxt, transfer_id, include_task_info=False, + to_dict=False): + transfer = db_api.get_transfer( + ctxt, transfer_id, include_task_info=include_task_info, to_dict=to_dict) - if not replica: + if not transfer: raise exception.NotFound( - "Replica with ID '%s' not found." % replica_id) - return replica + "Transfer with ID '%s' not found." 
% transfer_id) + return transfer - def get_migrations(self, ctxt, include_tasks, - include_task_info=False): - return db_api.get_migrations( + @staticmethod + def get_deployments(ctxt, include_tasks, include_task_info=False): + return db_api.get_deployments( ctxt, include_tasks, include_task_info=include_task_info, to_dict=True) - @migration_synchronized - def get_migration(self, ctxt, migration_id, include_task_info=False): - return self._get_migration( - ctxt, migration_id, include_task_info=include_task_info, + @deployment_synchronized + def get_deployment(self, ctxt, deployment_id, include_task_info=False): + return self._get_deployment( + ctxt, deployment_id, include_task_info=include_task_info, to_dict=True) @staticmethod - def _check_running_replica_migrations(ctxt, replica_id): - migrations = db_api.get_replica_migrations(ctxt, replica_id) - if [m.id for m in migrations if m.executions[0].status in ( + def _check_running_transfer_deployments(ctxt, transfer_id): + deployments = db_api.get_transfer_deployments(ctxt, transfer_id) + if [m.id for m in deployments if m.executions[0].status in ( constants.ACTIVE_EXECUTION_STATUSES)]: - raise exception.InvalidReplicaState( - "Replica '%s' is currently being migrated" % replica_id) + raise exception.InvalidTransferState( + "Transfer '%s' is currently being deployed" % transfer_id) @staticmethod def _check_running_executions(action): @@ -1250,25 +1345,25 @@ def _check_running_executions(action): "Another tasks execution is in progress: %s" % ( running_executions)) - def _check_replica_running_executions(self, ctxt, replica): - self._check_running_executions(replica) - self._check_running_replica_migrations(ctxt, replica.id) + def _check_transfer_running_executions(self, ctxt, transfer): + self._check_running_executions(transfer) + self._check_running_transfer_deployments(ctxt, transfer.id) @staticmethod - def _check_valid_replica_tasks_execution(replica, force=False): + def 
_check_valid_transfer_tasks_execution(transfer, force=False): sorted_executions = sorted( - replica.executions, key=lambda e: e.number, reverse=True) + transfer.executions, key=lambda e: e.number, reverse=True) if not sorted_executions: - raise exception.InvalidReplicaState( - "The Replica has never been executed.") + raise exception.InvalidTransferState( + "The Transfer has never been executed.") if not [e for e in sorted_executions - if e.type == constants.EXECUTION_TYPE_REPLICA_EXECUTION and ( + if e.type == constants.EXECUTION_TYPE_TRANSFER_EXECUTION and ( e.status == constants.EXECUTION_STATUS_COMPLETED)]: if not force: - raise exception.InvalidReplicaState( - "A replica must have been executed successfully at least " - "once in order to be migrated") + raise exception.InvalidTransferState( + "A transfer must have been executed successfully at least " + "once in order to be deployed") def _get_provider_types(self, ctxt, endpoint): provider_types = self.get_available_providers(ctxt).get(endpoint.type) @@ -1277,85 +1372,85 @@ def _get_provider_types(self, ctxt, endpoint): "No provider found for: %s" % endpoint.type) return provider_types["types"] - @replica_synchronized - def deploy_replica_instances( - self, ctxt, replica_id, clone_disks, force, + @transfer_synchronized + def deploy_transfer_instances( + self, ctxt, transfer_id, clone_disks, force, instance_osmorphing_minion_pool_mappings=None, skip_os_morphing=False, user_scripts=None): - replica = self._get_replica(ctxt, replica_id, include_task_info=True) - self._check_reservation_for_transfer( - replica, licensing_client.RESERVATION_TYPE_REPLICA) - self._check_replica_running_executions(ctxt, replica) - self._check_valid_replica_tasks_execution(replica, force) - user_scripts = user_scripts or replica.user_scripts + transfer = self._get_transfer( + ctxt, transfer_id, include_task_info=True) + self._check_transfer_running_executions(ctxt, transfer) + self._check_valid_transfer_tasks_execution(transfer, force) 
+ user_scripts = user_scripts or transfer.user_scripts destination_endpoint = self.get_endpoint( - ctxt, replica.destination_endpoint_id) + ctxt, transfer.destination_endpoint_id) destination_provider_types = self._get_provider_types( ctxt, destination_endpoint) - for instance, info in replica.info.items(): + for instance, info in transfer.info.items(): if not info.get("volumes_info"): - raise exception.InvalidReplicaState( - "The replica doesn't contain volumes information for " - "instance: %s. If replicated disks are deleted, the " - "replica needs to be executed anew before a migration can " - "occur" % instance) - - instances = replica.instances - - migration = models.Migration() - migration.id = str(uuid.uuid4()) - migration.base_id = migration.id - migration.origin_endpoint_id = replica.origin_endpoint_id - migration.destination_endpoint_id = replica.destination_endpoint_id + raise exception.InvalidTransferState( + "The transfer doesn't contain volumes information for " + "instance: %s. 
If transferred disks are deleted, the " + "transfer needs to be executed anew before a deployment" + " can occur" % instance) + + instances = transfer.instances + + deployment = models.Deployment() + deployment.id = str(uuid.uuid4()) + deployment.base_id = deployment.id + deployment.origin_endpoint_id = transfer.origin_endpoint_id + deployment.destination_endpoint_id = transfer.destination_endpoint_id # TODO(aznashwan): have these passed separately to the relevant # provider methods instead of through the dest-env: - dest_env = copy.deepcopy(replica.destination_environment) - dest_env['network_map'] = replica.network_map - dest_env['storage_mappings'] = replica.storage_mappings - migration.destination_environment = dest_env - migration.source_environment = replica.source_environment - migration.network_map = replica.network_map - migration.storage_mappings = replica.storage_mappings - migration.instances = instances - migration.replica = replica - migration.info = replica.info - migration.notes = replica.notes - migration.user_scripts = user_scripts - # NOTE: Migrations-from-Replica have no use for the source/target - # pools of the parent Replica so these can be omitted: - migration.origin_minion_pool_id = None - migration.destination_minion_pool_id = None - migration.instance_osmorphing_minion_pool_mappings = ( - replica.instance_osmorphing_minion_pool_mappings) + dest_env = copy.deepcopy(transfer.destination_environment) + dest_env['network_map'] = transfer.network_map + dest_env['storage_mappings'] = transfer.storage_mappings + deployment.destination_environment = dest_env + deployment.source_environment = transfer.source_environment + deployment.network_map = transfer.network_map + deployment.storage_mappings = transfer.storage_mappings + deployment.instances = instances + deployment.transfer = transfer + deployment.info = transfer.info + deployment.notes = transfer.notes + deployment.user_scripts = user_scripts + # NOTE: Deployments have no use for the 
source/target + # pools of the parent Transfer so these can be omitted: + deployment.origin_minion_pool_id = None + deployment.destination_minion_pool_id = None + deployment.instance_osmorphing_minion_pool_mappings = ( + transfer.instance_osmorphing_minion_pool_mappings) if instance_osmorphing_minion_pool_mappings: - migration.instance_osmorphing_minion_pool_mappings.update( + deployment.instance_osmorphing_minion_pool_mappings.update( instance_osmorphing_minion_pool_mappings) - self._check_minion_pools_for_action(ctxt, migration) + self._check_minion_pools_for_action(ctxt, deployment) + self._check_reservation_for_transfer(transfer) execution = models.TasksExecution() - migration.executions = [execution] + deployment.executions = [execution] execution.status = constants.EXECUTION_STATUS_UNEXECUTED execution.number = 1 - execution.type = constants.EXECUTION_TYPE_REPLICA_DEPLOY + execution.type = constants.EXECUTION_TYPE_DEPLOYMENT for instance in instances: - migration.info[instance]["clone_disks"] = clone_disks + deployment.info[instance]["clone_disks"] = clone_disks scripts = self._get_instance_scripts(user_scripts, instance) - migration.info[instance]["user_scripts"] = scripts + deployment.info[instance]["user_scripts"] = scripts # NOTE: we default/convert the volumes info to an empty list # to preserve backwards-compatibility with older versions # of Coriolis dating before the scheduling overhaul (PR##114) - if instance not in migration.info: - migration.info[instance] = {'volumes_info': []} + if instance not in deployment.info: + deployment.info[instance] = {'volumes_info': []} # NOTE: we update all of the param values before triggering an - # execution to ensure that the params on the Replica are used - # in case there was a failed Replica update (where the new values + # execution to ensure that the params on the Transfer are used + # in case there was a failed Transfer update (where the new values # could be in the `.info` field instead of the old ones) - 
migration.info[instance].update({ - "source_environment": migration.source_environment, + deployment.info[instance].update({ + "source_environment": deployment.source_environment, "target_environment": dest_env}) # TODO(aznashwan): have these passed separately to the relevant # provider methods (they're currently passed directly inside @@ -1363,20 +1458,20 @@ def deploy_replica_instances( # "network_map": network_map, # "storage_mappings": storage_mappings, - validate_replica_deployment_inputs_task = self._create_task( + validate_transfer_deployment_inputs_task = self._create_task( instance, - constants.TASK_TYPE_VALIDATE_REPLICA_DEPLOYMENT_INPUTS, + constants.TASK_TYPE_VALIDATE_DEPLOYMENT_INPUTS, execution) validate_osmorphing_minion_task = None - last_validation_task = validate_replica_deployment_inputs_task + last_validation_task = validate_transfer_deployment_inputs_task if not skip_os_morphing and instance in ( - migration.instance_osmorphing_minion_pool_mappings): + deployment.instance_osmorphing_minion_pool_mappings): # NOTE: these values are required for the # _check_execution_tasks_sanity call but # will be populated later when the pool # allocations actually happen: - migration.info[instance].update({ + deployment.info[instance].update({ "osmorphing_minion_machine_id": None, "osmorphing_minion_provider_properties": None, "osmorphing_minion_connection_info": None}) @@ -1384,27 +1479,27 @@ def deploy_replica_instances( instance, constants.TASK_TYPE_VALIDATE_OSMORPHING_MINION_POOL_COMPATIBILITY, # noqa: E501 execution, depends_on=[ - validate_replica_deployment_inputs_task.id]) + validate_transfer_deployment_inputs_task.id]) last_validation_task = validate_osmorphing_minion_task create_snapshot_task = self._create_task( - instance, constants.TASK_TYPE_CREATE_REPLICA_DISK_SNAPSHOTS, + instance, constants.TASK_TYPE_CREATE_TRANSFER_DISK_SNAPSHOTS, execution, depends_on=[ last_validation_task.id]) - deploy_replica_task = self._create_task( + deploy_transfer_task 
= self._create_task( instance, - constants.TASK_TYPE_DEPLOY_REPLICA_INSTANCE_RESOURCES, + constants.TASK_TYPE_DEPLOY_INSTANCE_RESOURCES, execution, depends_on=[create_snapshot_task.id]) - depends_on = [deploy_replica_task.id] + depends_on = [deploy_transfer_task.id] if not skip_os_morphing: task_deploy_os_morphing_resources = None attach_osmorphing_minion_volumes_task = None last_osmorphing_resources_deployment_task = None if instance in ( - migration.instance_osmorphing_minion_pool_mappings): + deployment.instance_osmorphing_minion_pool_mappings): osmorphing_vol_attachment_deps = [ validate_osmorphing_minion_task.id] osmorphing_vol_attachment_deps.extend(depends_on) @@ -1438,7 +1533,7 @@ def deploy_replica_instances( depends_on = [task_osmorphing.id] if instance in ( - migration.instance_osmorphing_minion_pool_mappings): + deployment.instance_osmorphing_minion_pool_mappings): detach_osmorphing_minion_volumes_task = self._create_task( instance, constants.TASK_TYPE_DETACH_VOLUMES_FROM_OSMORPHING_MINION, # noqa: E501 @@ -1474,13 +1569,13 @@ def deploy_replica_instances( finalize_deployment_task = self._create_task( instance, - constants.TASK_TYPE_FINALIZE_REPLICA_INSTANCE_DEPLOYMENT, + constants.TASK_TYPE_FINALIZE_INSTANCE_DEPLOYMENT, execution, depends_on=depends_on) self._create_task( instance, - constants.TASK_TYPE_DELETE_REPLICA_TARGET_DISK_SNAPSHOTS, + constants.TASK_TYPE_DELETE_TRANSFER_TARGET_DISK_SNAPSHOTS, execution, depends_on=[ create_snapshot_task.id, finalize_deployment_task.id], @@ -1488,43 +1583,43 @@ def deploy_replica_instances( cleanup_deployment_task = self._create_task( instance, - constants.TASK_TYPE_CLEANUP_FAILED_REPLICA_INSTANCE_DEPLOYMENT, + constants.TASK_TYPE_CLEANUP_FAILED_INSTANCE_DEPLOYMENT, execution, depends_on=[ - deploy_replica_task.id, + deploy_transfer_task.id, finalize_deployment_task.id], on_error_only=True) if not clone_disks: self._create_task( instance, - constants.TASK_TYPE_RESTORE_REPLICA_DISK_SNAPSHOTS, + 
constants.TASK_TYPE_RESTORE_TRANSFER_DISK_SNAPSHOTS, execution, depends_on=[cleanup_deployment_task.id], on_error=True) - self._check_execution_tasks_sanity(execution, migration.info) - db_api.add_migration(ctxt, migration) - LOG.info("Migration created: %s", migration.id) + self._check_execution_tasks_sanity(execution, deployment.info) + db_api.add_deployment(ctxt, deployment) + LOG.info("Deployment created: %s", deployment.id) if not skip_os_morphing and ( - migration.instance_osmorphing_minion_pool_mappings): - # NOTE: we lock on the migration ID to ensure the minion + deployment.instance_osmorphing_minion_pool_mappings): + # NOTE: we lock on the deployment ID to ensure the minion # allocation confirmations don't come in too early: with lockutils.lock( - constants.MIGRATION_LOCK_NAME_FORMAT % migration.id, + constants.DEPLOYMENT_LOCK_NAME_FORMAT % deployment.id, external=True): (self._minion_manager_client - .allocate_minion_machines_for_migration( - ctxt, migration, include_transfer_minions=False, + .allocate_minion_machines_for_deployment( + ctxt, deployment, include_transfer_minions=False, include_osmorphing_minions=True)) self._set_tasks_execution_status( ctxt, execution, constants.EXECUTION_STATUS_AWAITING_MINION_ALLOCATIONS) else: - self._begin_tasks(ctxt, migration, execution) + self._begin_tasks(ctxt, deployment, execution) - return self.get_migration(ctxt, migration.id) + return self.get_deployment(ctxt, deployment.id) def _get_instance_scripts(self, user_scripts, instance): user_scripts = user_scripts or {} @@ -1603,553 +1698,164 @@ def _update_task_info_for_minion_allocations( db_api.update_transfer_action_info_for_instance( ctxt, action.id, instance, action.info[instance]) - def _get_last_execution_for_replica(self, ctxt, replica, requery=False): + def _get_last_execution_for_transfer(self, ctxt, transfer, requery=False): if requery: - replica = self._get_replica(ctxt, replica.id) - last_replica_execution = None - if not replica.executions: - raise 
exception.InvalidReplicaState( - "Replica with ID '%s' has no existing Replica " - "executions." % (replica.id)) - last_replica_execution = sorted( - replica.executions, key=lambda e: e.number)[-1] - return last_replica_execution - - def _get_execution_for_migration(self, ctxt, migration, requery=False): + transfer = self._get_transfer(ctxt, transfer.id) + last_transfer_execution = None + if not transfer.executions: + raise exception.InvalidTransferState( + "Transfer with ID '%s' has no existing Transfer " + "executions." % transfer.id) + last_transfer_execution = sorted( + transfer.executions, key=lambda e: e.number)[-1] + return last_transfer_execution + + def _get_execution_for_deployment(self, ctxt, deployment, requery=False): if requery: - migration = self._get_migration(ctxt, migration.id) - - if not migration.executions: - raise exception.InvalidMigrationState( - "Migration with ID '%s' has no existing executions." % ( - migration.id)) - if len(migration.executions) > 1: - raise exception.InvalidMigrationState( - "Migration with ID '%s' has more than one execution:" - " %s" % (migration.id, [e.id for e in migration.executions])) - return migration.executions[0] - - @replica_synchronized - def confirm_replica_minions_allocation( - self, ctxt, replica_id, minion_machine_allocations): - replica = self._get_replica(ctxt, replica_id, include_task_info=True) + deployment = self._get_deployment(ctxt, deployment.id) + + if not deployment.executions: + raise exception.InvalidDeploymentState( + "Deployment with ID '%s' has no existing executions." 
% ( + deployment.id)) + if len(deployment.executions) > 1: + raise exception.InvalidDeploymentState( + "Deployment with ID '%s' has more than one execution:" + " %s" % (deployment.id, [e.id for e in deployment.executions])) + return deployment.executions[0] + + @transfer_synchronized + def confirm_transfer_minions_allocation( + self, ctxt, transfer_id, minion_machine_allocations): + transfer = self._get_transfer( + ctxt, transfer_id, include_task_info=True) awaiting_minions_status = ( constants.EXECUTION_STATUS_AWAITING_MINION_ALLOCATIONS) - if replica.last_execution_status != awaiting_minions_status: - raise exception.InvalidReplicaState( - "Replica is in '%s' status instead of the expected '%s' to " + if transfer.last_execution_status != awaiting_minions_status: + raise exception.InvalidTransferState( + "Transfer is in '%s' status instead of the expected '%s' to " "have minion machines allocated for it." % ( - replica.last_execution_status, awaiting_minions_status)) + transfer.last_execution_status, awaiting_minions_status)) - last_replica_execution = self._get_last_execution_for_replica( - ctxt, replica, requery=False) + last_transfer_execution = self._get_last_execution_for_transfer( + ctxt, transfer, requery=False) self._update_task_info_for_minion_allocations( - ctxt, replica, minion_machine_allocations) + ctxt, transfer, minion_machine_allocations) - last_replica_execution = db_api.get_replica_tasks_execution( - ctxt, replica.id, last_replica_execution.id) + last_transfer_execution = db_api.get_transfer_tasks_execution( + ctxt, transfer.id, last_transfer_execution.id) self._begin_tasks( - ctxt, replica, last_replica_execution) + ctxt, transfer, last_transfer_execution) - @replica_synchronized - def report_replica_minions_allocation_error( - self, ctxt, replica_id, minion_allocation_error_details): - replica = self._get_replica(ctxt, replica_id) + @transfer_synchronized + def report_transfer_minions_allocation_error( + self, ctxt, transfer_id, 
minion_allocation_error_details): + transfer = self._get_transfer(ctxt, transfer_id) awaiting_minions_status = ( constants.EXECUTION_STATUS_AWAITING_MINION_ALLOCATIONS) - if replica.last_execution_status != awaiting_minions_status: - raise exception.InvalidReplicaState( - "Replica is in '%s' status instead of the expected '%s' to " + if transfer.last_execution_status != awaiting_minions_status: + raise exception.InvalidTransferState( + "Transfer is in '%s' status instead of the expected '%s' to " "have minion machines allocations fail for it." % ( - replica.last_execution_status, awaiting_minions_status)) + transfer.last_execution_status, awaiting_minions_status)) - last_replica_execution = self._get_last_execution_for_replica( - ctxt, replica, requery=False) + last_transfer_execution = self._get_last_execution_for_transfer( + ctxt, transfer, requery=False) LOG.warn( - "Error occured while allocating minion machines for Replica '%s'. " - "Cancelling the current Replica Execution ('%s'). Error was: %s", - replica_id, last_replica_execution.id, + "Error occurred while allocating minion machines for Transfer " + "'%s'. Cancelling the current Transfer Execution ('%s'). 
" + "Error was: %s", + transfer_id, last_transfer_execution.id, minion_allocation_error_details) self._cancel_tasks_execution( - ctxt, last_replica_execution, requery=True) + ctxt, last_transfer_execution, requery=True) self._set_tasks_execution_status( - ctxt, last_replica_execution, + ctxt, last_transfer_execution, constants.EXECUTION_STATUS_ERROR_ALLOCATING_MINIONS) - @migration_synchronized - def confirm_migration_minions_allocation( - self, ctxt, migration_id, minion_machine_allocations): - migration = self._get_migration( - ctxt, migration_id, include_task_info=True) + @deployment_synchronized + def confirm_deployment_minions_allocation( + self, ctxt, deployment_id, minion_machine_allocations): + deployment = self._get_deployment( + ctxt, deployment_id, include_task_info=True) awaiting_minions_status = ( constants.EXECUTION_STATUS_AWAITING_MINION_ALLOCATIONS) - if migration.last_execution_status != awaiting_minions_status: - raise exception.InvalidMigrationState( - "Migration is in '%s' status instead of the expected '%s' to " + if deployment.last_execution_status != awaiting_minions_status: + raise exception.InvalidDeploymentState( + "Deployment is in '%s' status instead of the expected '%s' to " "have minion machines allocated for it." 
% ( - migration.last_execution_status, awaiting_minions_status)) + deployment.last_execution_status, awaiting_minions_status)) - execution = self._get_execution_for_migration( - ctxt, migration, requery=False) + execution = self._get_execution_for_deployment( + ctxt, deployment, requery=False) self._update_task_info_for_minion_allocations( - ctxt, migration, minion_machine_allocations) - self._begin_tasks(ctxt, migration, execution) + ctxt, deployment, minion_machine_allocations) + self._begin_tasks(ctxt, deployment, execution) - @migration_synchronized - def report_migration_minions_allocation_error( - self, ctxt, migration_id, minion_allocation_error_details): - migration = self._get_migration(ctxt, migration_id) + @deployment_synchronized + def report_deployment_minions_allocation_error( + self, ctxt, deployment_id, minion_allocation_error_details): + deployment = self._get_deployment(ctxt, deployment_id) awaiting_minions_status = ( constants.EXECUTION_STATUS_AWAITING_MINION_ALLOCATIONS) - if migration.last_execution_status != awaiting_minions_status: - raise exception.InvalidMigrationState( - "Migration is in '%s' status instead of the expected '%s' to " + if deployment.last_execution_status != awaiting_minions_status: + raise exception.InvalidDeploymentState( + "Deployment is in '%s' status instead of the expected '%s' to " "have minion machines allocations fail for it." % ( - migration.last_execution_status, awaiting_minions_status)) + deployment.last_execution_status, awaiting_minions_status)) - execution = self._get_execution_for_migration( - ctxt, migration, requery=False) + execution = self._get_execution_for_deployment( + ctxt, deployment, requery=False) LOG.warn( "Error occured while allocating minion machines for " - "Migration '%s'. Cancelling the current Execution ('%s'). " + "Deployment '%s'. Cancelling the current Execution ('%s'). 
" "Error was: %s", - migration_id, execution.id, minion_allocation_error_details) + deployment_id, execution.id, minion_allocation_error_details) self._cancel_tasks_execution( ctxt, execution, requery=True) self._set_tasks_execution_status( ctxt, execution, constants.EXECUTION_STATUS_ERROR_ALLOCATING_MINIONS) - def migrate_instances( - self, ctxt, origin_endpoint_id, destination_endpoint_id, - origin_minion_pool_id, destination_minion_pool_id, - instance_osmorphing_minion_pool_mappings, source_environment, - destination_environment, instances, network_map, storage_mappings, - replication_count, shutdown_instances=False, notes=None, - skip_os_morphing=False, user_scripts=None): - origin_endpoint = self.get_endpoint(ctxt, origin_endpoint_id) - destination_endpoint = self.get_endpoint( - ctxt, destination_endpoint_id) - self._check_endpoints(ctxt, origin_endpoint, destination_endpoint) - - destination_provider_types = self._get_provider_types( - ctxt, destination_endpoint) - - migration = models.Migration() - migration.id = str(uuid.uuid4()) - migration.base_id = migration.id - migration.origin_endpoint_id = origin_endpoint_id - migration.destination_endpoint_id = destination_endpoint_id - migration.destination_environment = destination_environment - migration.source_environment = source_environment - migration.network_map = network_map - migration.storage_mappings = storage_mappings - migration.last_execution_status = constants.EXECUTION_STATUS_UNEXECUTED - execution = models.TasksExecution() - execution.status = constants.EXECUTION_STATUS_UNEXECUTED - execution.number = 1 - execution.type = constants.EXECUTION_TYPE_MIGRATION - migration.executions = [execution] - migration.instances = instances - migration.info = {} - migration.user_scripts = user_scripts or {} - migration.notes = notes - migration.shutdown_instances = shutdown_instances - migration.replication_count = replication_count - migration.origin_minion_pool_id = origin_minion_pool_id - 
migration.destination_minion_pool_id = destination_minion_pool_id - if instance_osmorphing_minion_pool_mappings is None: - instance_osmorphing_minion_pool_mappings = {} - migration.instance_osmorphing_minion_pool_mappings = ( - instance_osmorphing_minion_pool_mappings) - - self._check_create_reservation_for_transfer( - migration, licensing_client.RESERVATION_TYPE_MIGRATION) - - self._check_minion_pools_for_action(ctxt, migration) - - for instance in instances: - migration.info[instance] = { - "volumes_info": [], - "source_environment": source_environment, - "target_environment": destination_environment, - "user_scripts": self._get_instance_scripts( - user_scripts, instance), - # NOTE: we must explicitly set this in each VM's info - # to prevent the Replica disks from being cloned: - "clone_disks": False} - # TODO(aznashwan): have these passed separately to the relevant - # provider methods (they're currently passed directly inside - # dest-env by the API service when accepting the call) - # "network_map": network_map, - # "storage_mappings": storage_mappings, - - get_instance_info_task = self._create_task( - instance, - constants.TASK_TYPE_GET_INSTANCE_INFO, - execution) - - validate_migration_source_inputs_task = self._create_task( - instance, - constants.TASK_TYPE_VALIDATE_MIGRATION_SOURCE_INPUTS, - execution) - - validate_migration_destination_inputs_task = self._create_task( - instance, - constants.TASK_TYPE_VALIDATE_MIGRATION_DESTINATION_INPUTS, - execution, - depends_on=[get_instance_info_task.id]) - - migration_resources_task_ids = [] - validate_origin_minion_task = None - deploy_migration_source_resources_task = None - migration_resources_task_deps = [ - get_instance_info_task.id, - validate_migration_source_inputs_task.id] - if migration.origin_minion_pool_id: - # NOTE: these values are required for the - # _check_execution_tasks_sanity call but - # will be populated later when the pool - # allocations actually happen: - migration.info[instance].update({ - 
"origin_minion_machine_id": None, - "origin_minion_provider_properties": None, - "origin_minion_connection_info": None}) - validate_origin_minion_task = self._create_task( - instance, - constants.TASK_TYPE_VALIDATE_SOURCE_MINION_POOL_COMPATIBILITY, # noqa: E501 - execution, - depends_on=migration_resources_task_deps) - migration_resources_task_ids.append( - validate_origin_minion_task.id) - else: - deploy_migration_source_resources_task = self._create_task( - instance, - constants.TASK_TYPE_DEPLOY_MIGRATION_SOURCE_RESOURCES, - execution, depends_on=migration_resources_task_deps) - migration_resources_task_ids.append( - deploy_migration_source_resources_task.id) - - create_instance_disks_task = self._create_task( - instance, constants.TASK_TYPE_CREATE_INSTANCE_DISKS, - execution, depends_on=[ - validate_migration_source_inputs_task.id, - validate_migration_destination_inputs_task.id]) - - validate_destination_minion_task = None - attach_destination_minion_disks_task = None - deploy_migration_target_resources_task = None - if migration.destination_minion_pool_id: - # NOTE: these values are required for the - # _check_execution_tasks_sanity call but - # will be populated later when the pool - # allocations actually happen: - migration.info[instance].update({ - "destination_minion_machine_id": None, - "destination_minion_provider_properties": None, - "destination_minion_connection_info": None, - "destination_minion_backup_writer_connection_info": None}) - ttyp = ( - constants.TASK_TYPE_VALIDATE_DESTINATION_MINION_POOL_COMPATIBILITY) # noqa: E501 - validate_destination_minion_task = self._create_task( - instance, ttyp, execution, depends_on=[ - validate_migration_destination_inputs_task.id]) - - attach_destination_minion_disks_task = self._create_task( - instance, - constants.TASK_TYPE_ATTACH_VOLUMES_TO_DESTINATION_MINION, - execution, depends_on=[ - validate_destination_minion_task.id, - create_instance_disks_task.id]) - migration_resources_task_ids.append( - 
attach_destination_minion_disks_task.id) - else: - deploy_migration_target_resources_task = self._create_task( - instance, - constants.TASK_TYPE_DEPLOY_MIGRATION_TARGET_RESOURCES, - execution, depends_on=[create_instance_disks_task.id]) - migration_resources_task_ids.append( - deploy_migration_target_resources_task.id) - - validate_osmorphing_minion_task = None - if not skip_os_morphing and ( - instance in instance_osmorphing_minion_pool_mappings): - # NOTE: these values are required for the - # _check_execution_tasks_sanity call but - # will be populated later when the pool - # allocations actually happen: - migration.info[instance].update({ - "osmorphing_minion_machine_id": None, - "osmorphing_minion_provider_properties": None, - "osmorphing_minion_connection_info": None}) - validate_osmorphing_minion_task = self._create_task( - instance, - constants.TASK_TYPE_VALIDATE_OSMORPHING_MINION_POOL_COMPATIBILITY, # noqa: E501 - execution, depends_on=[ - validate_migration_destination_inputs_task.id]) - migration_resources_task_ids.append( - validate_osmorphing_minion_task.id) - - last_sync_task = None - first_sync_task = None - for i in range(migration.replication_count): - # insert SHUTDOWN_INSTANCES task before the last sync: - if i == (migration.replication_count - 1) and ( - migration.shutdown_instances): - shutdown_deps = migration_resources_task_ids - if last_sync_task: - shutdown_deps = [last_sync_task.id] - last_sync_task = self._create_task( - instance, constants.TASK_TYPE_SHUTDOWN_INSTANCE, - execution, depends_on=shutdown_deps) - - replication_deps = migration_resources_task_ids - if last_sync_task: - replication_deps = [last_sync_task.id] - - last_sync_task = self._create_task( - instance, constants.TASK_TYPE_REPLICATE_DISKS, - execution, depends_on=replication_deps) - if not first_sync_task: - first_sync_task = last_sync_task - - release_origin_minion_task = None - delete_source_resources_task = None - source_resource_cleanup_task = None - if 
migration.origin_minion_pool_id: - release_origin_minion_task = self._create_task( - instance, - constants.TASK_TYPE_RELEASE_SOURCE_MINION, # noqa: E501 - execution, - depends_on=[ - validate_origin_minion_task.id, - last_sync_task.id], - on_error=True) - source_resource_cleanup_task = release_origin_minion_task - else: - delete_source_resources_task = self._create_task( - instance, - constants.TASK_TYPE_DELETE_MIGRATION_SOURCE_RESOURCES, - execution, depends_on=[ - deploy_migration_source_resources_task.id, - last_sync_task.id], - on_error=True) - source_resource_cleanup_task = delete_source_resources_task - - cleanup_source_storage_task = self._create_task( - instance, constants.TASK_TYPE_CLEANUP_INSTANCE_SOURCE_STORAGE, - execution, depends_on=[ - first_sync_task.id, - source_resource_cleanup_task.id], - on_error=True) - - target_resources_cleanup_task = None - if migration.destination_minion_pool_id: - detach_volumes_from_destination_minion_task = ( - self._create_task( - instance, - constants.TASK_TYPE_DETACH_VOLUMES_FROM_DESTINATION_MINION, # noqa: E501 - execution, - depends_on=[ - attach_destination_minion_disks_task.id, - last_sync_task.id], - on_error=True)) - - release_destination_minion_task = self._create_task( - instance, - constants.TASK_TYPE_RELEASE_DESTINATION_MINION, - execution, depends_on=[ - validate_destination_minion_task.id, - detach_volumes_from_destination_minion_task.id], - on_error=True) - target_resources_cleanup_task = release_destination_minion_task - else: - delete_destination_resources_task = self._create_task( - instance, - constants.TASK_TYPE_DELETE_MIGRATION_TARGET_RESOURCES, - execution, depends_on=[ - deploy_migration_target_resources_task.id, - last_sync_task.id], - on_error=True) - target_resources_cleanup_task = ( - delete_destination_resources_task) - - deploy_instance_task = self._create_task( - instance, constants.TASK_TYPE_DEPLOY_INSTANCE_RESOURCES, - execution, depends_on=[ - last_sync_task.id, - 
target_resources_cleanup_task.id]) - - depends_on = [deploy_instance_task.id] - osmorphing_resources_cleanup_task = None - if not skip_os_morphing: - task_deploy_os_morphing_resources = None - task_delete_os_morphing_resources = None - attach_osmorphing_minion_volumes_task = None - last_osmorphing_resources_deployment_task = None - if instance in ( - migration.instance_osmorphing_minion_pool_mappings): - osmorphing_vol_attachment_deps = [ - validate_osmorphing_minion_task.id] - osmorphing_vol_attachment_deps.extend(depends_on) - attach_osmorphing_minion_volumes_task = self._create_task( - instance, - constants.TASK_TYPE_ATTACH_VOLUMES_TO_OSMORPHING_MINION, # noqa: E501 - execution, depends_on=osmorphing_vol_attachment_deps) - last_osmorphing_resources_deployment_task = ( - attach_osmorphing_minion_volumes_task) - - collect_osmorphing_info_task = self._create_task( - instance, - constants.TASK_TYPE_COLLECT_OSMORPHING_INFO, - execution, - depends_on=[attach_osmorphing_minion_volumes_task.id]) - last_osmorphing_resources_deployment_task = ( - collect_osmorphing_info_task) - else: - task_deploy_os_morphing_resources = self._create_task( - instance, - constants.TASK_TYPE_DEPLOY_OS_MORPHING_RESOURCES, - execution, depends_on=depends_on) - last_osmorphing_resources_deployment_task = ( - task_deploy_os_morphing_resources) - - task_osmorphing = self._create_task( - instance, constants.TASK_TYPE_OS_MORPHING, - execution, depends_on=[ - last_osmorphing_resources_deployment_task.id]) - - depends_on = [task_osmorphing.id] - - if instance in ( - migration.instance_osmorphing_minion_pool_mappings): - detach_osmorphing_minion_volumes_task = self._create_task( - instance, - constants.TASK_TYPE_DETACH_VOLUMES_FROM_OSMORPHING_MINION, # noqa: E501 - execution, depends_on=[ - attach_osmorphing_minion_volumes_task.id, - task_osmorphing.id], - on_error=True) - - release_osmorphing_minion_task = self._create_task( - instance, - constants.TASK_TYPE_RELEASE_OSMORPHING_MINION, - execution, 
depends_on=[ - validate_osmorphing_minion_task.id, - detach_osmorphing_minion_volumes_task.id], - on_error=True) - depends_on.append(release_osmorphing_minion_task.id) - osmorphing_resources_cleanup_task = ( - release_osmorphing_minion_task) - else: - task_delete_os_morphing_resources = ( - self._create_task( - instance, constants.TASK_TYPE_DELETE_OS_MORPHING_RESOURCES, # noqa: E501 - execution, depends_on=[ - task_deploy_os_morphing_resources.id, - task_osmorphing.id], - on_error=True)) - - depends_on.append(task_delete_os_morphing_resources.id) - osmorphing_resources_cleanup_task = ( - task_delete_os_morphing_resources) - - if (constants.PROVIDER_TYPE_INSTANCE_FLAVOR in - destination_provider_types): - get_optimal_flavor_task = self._create_task( - instance, constants.TASK_TYPE_GET_OPTIMAL_FLAVOR, - execution, depends_on=depends_on) - depends_on = [get_optimal_flavor_task.id] - - finalize_deployment_task = self._create_task( - instance, - constants.TASK_TYPE_FINALIZE_INSTANCE_DEPLOYMENT, - execution, depends_on=depends_on) - - cleanup_failed_deployment_task = self._create_task( - instance, - constants.TASK_TYPE_CLEANUP_FAILED_INSTANCE_DEPLOYMENT, - execution, depends_on=[ - deploy_instance_task.id, - finalize_deployment_task.id], - on_error_only=True) - - cleanup_deps = [ - create_instance_disks_task.id, - cleanup_source_storage_task.id, - target_resources_cleanup_task.id, - cleanup_failed_deployment_task.id] - if osmorphing_resources_cleanup_task: - cleanup_deps.append(osmorphing_resources_cleanup_task.id) - self._create_task( - instance, constants.TASK_TYPE_CLEANUP_INSTANCE_TARGET_STORAGE, - execution, depends_on=cleanup_deps, - on_error_only=True) - - self._check_execution_tasks_sanity(execution, migration.info) - db_api.add_migration(ctxt, migration) - LOG.info("Migration added to DB: %s", migration.id) - - uses_minion_pools = any([ - migration.origin_minion_pool_id, - migration.destination_minion_pool_id, - 
migration.instance_osmorphing_minion_pool_mappings]) - if uses_minion_pools: - # NOTE: we lock on the migration ID to ensure the minion - # allocation confirmations don't come in too early: - with lockutils.lock( - constants.MIGRATION_LOCK_NAME_FORMAT % migration.id, - external=True): - (self._minion_manager_client - .allocate_minion_machines_for_migration( - ctxt, migration, include_transfer_minions=True, - include_osmorphing_minions=not skip_os_morphing) - ) - self._set_tasks_execution_status( - ctxt, execution, - constants.EXECUTION_STATUS_AWAITING_MINION_ALLOCATIONS) - else: - self._begin_tasks(ctxt, migration, execution) - - return self.get_migration(ctxt, migration.id) - - def _get_migration(self, ctxt, migration_id, include_task_info=False, - to_dict=False): - migration = db_api.get_migration( - ctxt, migration_id, include_task_info=include_task_info, + def _get_deployment(self, ctxt, deployment_id, include_task_info=False, + to_dict=False): + deployment = db_api.get_deployment( + ctxt, deployment_id, include_task_info=include_task_info, to_dict=to_dict) - if not migration: + if not deployment: raise exception.NotFound( - "Migration with ID '%s' not found." % migration_id) - return migration + "Deployment with ID '%s' not found." % deployment_id) + return deployment - @migration_synchronized - def delete_migration(self, ctxt, migration_id): - migration = self._get_migration(ctxt, migration_id) - execution = migration.executions[0] + def _delete_deployment(self, ctxt, deployment_id): + deployment = self._get_deployment(ctxt, deployment_id) + execution = deployment.executions[0] if execution.status in constants.ACTIVE_EXECUTION_STATUSES: - raise exception.InvalidMigrationState( - "Cannot delete Migration '%s' as it is currently in " - "'%s' state." 
% (migration_id, execution.status)) - db_api.delete_migration(ctxt, migration_id) - - @migration_synchronized - def cancel_migration(self, ctxt, migration_id, force): - migration = self._get_migration(ctxt, migration_id) - if len(migration.executions) != 1: - raise exception.InvalidMigrationState( - "Migration '%s' has in improper number of tasks " - "executions: %d" % (migration_id, len(migration.executions))) - execution = migration.executions[0] + raise exception.InvalidDeploymentState( + "Cannot delete Deployment '%s' as it is currently in " + "'%s' state." % (deployment_id, execution.status)) + db_api.delete_deployment(ctxt, deployment_id) + + @deployment_synchronized + def delete_deployment(self, ctxt, deployment_id): + self._delete_deployment(ctxt, deployment_id) + + def _cancel_deployment(self, ctxt, deployment_id, force): + deployment = self._get_deployment(ctxt, deployment_id) + if len(deployment.executions) != 1: + raise exception.InvalidDeploymentState( + "Deployment '%s' has in improper number of tasks " + "executions: %d" % (deployment_id, len(deployment.executions))) + execution = deployment.executions[0] if execution.status not in constants.ACTIVE_EXECUTION_STATUSES: - raise exception.InvalidMigrationState( - "Migration '%s' is not currently running" % migration_id) + raise exception.InvalidDeploymentState( + "Deployment '%s' is not currently running" % deployment_id) if execution.status == constants.EXECUTION_STATUS_CANCELLING and ( not force): - raise exception.InvalidMigrationState( - "Migration '%s' is already being cancelled. Please use the " + raise exception.InvalidDeploymentState( + "Deployment '%s' is already being cancelled. 
Please use the " "force option if you'd like to force-cancel it.") with lockutils.lock( @@ -2157,6 +1863,10 @@ def cancel_migration(self, ctxt, migration_id, force): external=True): self._cancel_tasks_execution(ctxt, execution, force=force) + @deployment_synchronized + def cancel_deployment(self, ctxt, deployment_id, force): + self._cancel_deployment(ctxt, deployment_id, force) + def _cancel_tasks_execution( self, ctxt, execution, requery=True, force=False): """ Cancels a running Execution by: @@ -2328,11 +2038,61 @@ def _cancel_tasks_execution( "No new tasks were started for execution '%s' following " "state advancement after cancellation.", execution.id) + def _update_reservation_fulfillment_for_execution(self, ctxt, execution): + """ Updates the reservation fulfillment status for the parent + transfer action of the given execution based on its type. + + Replica transfers are marked as fulfilled as soon as a Transfer + Execution is successfully completed. + Live migration transfers are marked as fulfilled as soon as they + are deployed for the first (and only) time. 
+ """ + if execution.type not in ( + constants.EXECUTION_TYPE_TRANSFER_EXECUTION, + constants.EXECUTION_TYPE_DEPLOYMENT): + LOG.debug( + f"Skipping setting reservation fulfillment for execution " + f"'{execution.id}' of type '{execution.type}'.") + return + + if execution.type not in ( + constants.EXECUTION_TYPE_TRANSFER_EXECUTION, + constants.EXECUTION_TYPE_DEPLOYMENT): + LOG.debug( + f"Skipping setting transfer fulfillment for execution " + f"'{execution.id}' of type '{execution.type}'.") + return + + transfer_action = execution.action + transfer_id = transfer_action.base_id + if transfer_action.type == constants.TRANSFER_ACTION_TYPE_DEPLOYMENT: + deployment = self._get_deployment(ctxt, transfer_id) + transfer_id = deployment.transfer_id + transfer_action = self._get_transfer( + ctxt, transfer_id, include_task_info=False) + else: + transfer_action = self._get_transfer( + ctxt, execution.action_id, include_task_info=False) + + scenario = transfer_action.scenario + if scenario == constants.TRANSFER_SCENARIO_REPLICA and ( + execution.type == constants.EXECUTION_TYPE_TRANSFER_EXECUTION): + self._check_mark_reservation_fulfilled( + transfer_action, must_unfulfilled=False) + elif scenario == constants.TRANSFER_SCENARIO_LIVE_MIGRATION and ( + execution.type == constants.EXECUTION_TYPE_DEPLOYMENT): + self._check_mark_reservation_fulfilled( + transfer_action, must_unfulfilled=False) + else: + LOG.debug( + f"Skipping setting transfer fulfillment for execution " + f"'{execution.id}' of type '{execution.type}' on parent" + f"action {transfer_id} of scenario type " + f"{transfer_action.scenario}.") + def _set_tasks_execution_status( self, ctxt, execution, new_execution_status): previous_execution_status = execution.status - execution = db_api.set_execution_status( - ctxt, execution.id, new_execution_status) LOG.info( "Tasks execution %(id)s (action %(action)s) status updated " "from %(old_status)s to %(new_status)s", @@ -2340,6 +2100,10 @@ def _set_tasks_execution_status( 
"action": execution.action_id, "old_status": previous_execution_status}) + if new_execution_status == constants.EXECUTION_STATUS_COMPLETED: + self._update_reservation_fulfillment_for_execution( + ctxt, execution) + if new_execution_status in constants.FINALIZED_EXECUTION_STATUSES: # NOTE(aznashwan): because the taskflow flows within the minion # manager cannot [currently] be cancelled and are destined to @@ -2380,6 +2144,9 @@ def _set_tasks_execution_status( execution.id, execution.type, execution.action_id, new_execution_status) + execution = db_api.set_execution_status( + ctxt, execution.id, new_execution_status) + @parent_tasks_execution_synchronized def set_task_host(self, ctxt, task_id, host): """ Saves the ID of the worker host which has accepted @@ -2591,7 +2358,7 @@ def _advance_execution_state( requery=not requery) == ( constants.EXECUTION_STATUS_DEADLOCKED): LOG.error( - "Execution '%s' deadlocked even before Replica state " + "Execution '%s' deadlocked even before Transfer state " "advancement . Cleanup has been perfomed. Returning.", execution.id) return [] @@ -2820,9 +2587,9 @@ def _start_task(task): ctxt, execution, task_statuses=task_statuses) == ( constants.EXECUTION_STATUS_DEADLOCKED): LOG.error( - "Execution '%s' deadlocked after Replica state advancement" - ". Cleanup has been perfomed. Returning early.", - execution.id) + "Execution '%s' deadlocked after Transfer state " + "advancement. Cleanup has been performed. " + "Returning early.", execution.id) return [] LOG.debug( "No new tasks were started for execution '%s'", execution.id) @@ -2846,26 +2613,27 @@ def _start_task(task): return started_tasks - def _update_replica_volumes_info(self, ctxt, replica_id, instance, - updated_task_info): - """ WARN: the lock for the Replica must be pre-acquired. """ + @staticmethod + def _update_transfer_volumes_info(ctxt, transfer_id, instance, + updated_task_info): + """ WARN: the lock for the Transfer must be pre-acquired. 
""" db_api.update_transfer_action_info_for_instance( - ctxt, replica_id, instance, + ctxt, transfer_id, instance, updated_task_info) - def _update_volumes_info_for_migration_parent_replica( - self, ctxt, migration_id, instance, updated_task_info): - migration = db_api.get_migration(ctxt, migration_id) - replica_id = migration.replica_id + def _update_volumes_info_for_deployment_parent_transfer( + self, ctxt, deployment_id, instance, updated_task_info): + deployment = db_api.get_deployment(ctxt, deployment_id) + transfer_id = deployment.transfer_id with lockutils.lock( - constants.REPLICA_LOCK_NAME_FORMAT % replica_id, + constants.TRANSFER_LOCK_NAME_FORMAT % transfer_id, external=True): LOG.debug( - "Updating volume_info in replica due to snapshot " - "restore during migration. replica id: %s", replica_id) - self._update_replica_volumes_info( - ctxt, replica_id, instance, updated_task_info) + "Updating volume_info in transfer due to snapshot " + "restore during deployment. transfer id: %s", transfer_id) + self._update_transfer_volumes_info( + ctxt, transfer_id, instance, updated_task_info) def _handle_post_task_actions(self, ctxt, task, execution, task_info): task_type = task.task_type @@ -2880,11 +2648,11 @@ def _check_other_tasks_running(execution, current_task): break return still_running - if task_type == constants.TASK_TYPE_RESTORE_REPLICA_DISK_SNAPSHOTS: + if task_type == constants.TASK_TYPE_RESTORE_TRANSFER_DISK_SNAPSHOTS: # When restoring a snapshot in some import providers (OpenStack), # a new volume_id is generated. This needs to be updated in the - # Replica instance as well. + # Transfer instance as well. 
volumes_info = task_info.get('volumes_info') if not volumes_info: LOG.warn( @@ -2898,30 +2666,28 @@ def _check_other_tasks_running(execution, current_task): task.instance, execution.action_id, task.id, task_type, utils.sanitize_task_info( {'volumes_info': volumes_info})) - self._update_volumes_info_for_migration_parent_replica( + self._update_volumes_info_for_deployment_parent_transfer( ctxt, execution.action_id, task.instance, {"volumes_info": volumes_info}) elif task_type == ( - constants.TASK_TYPE_DELETE_REPLICA_TARGET_DISK_SNAPSHOTS): + constants.TASK_TYPE_DELETE_TRANSFER_TARGET_DISK_SNAPSHOTS): if not task_info.get("clone_disks"): - # The migration completed. If the replica is executed again, - # new volumes need to be deployed in place of the migrated + # The deployment completed. If the transfer is executed again, + # new volumes need to be created in place of the deployed # ones. LOG.info( - "Unsetting 'volumes_info' for instance '%s' in Replica " - "'%s' after completion of Replica task '%s' (type '%s') " - "with clone_disks=False.", + "Unsetting 'volumes_info' for instance '%s' in Transfer " + "'%s' after completion of Transfer task '%s' " + "(type '%s') with clone_disks=False.", task.instance, execution.action_id, task.id, task_type) - self._update_volumes_info_for_migration_parent_replica( + self._update_volumes_info_for_deployment_parent_transfer( ctxt, execution.action_id, task.instance, {"volumes_info": []}) - elif task_type in ( - constants.TASK_TYPE_FINALIZE_REPLICA_INSTANCE_DEPLOYMENT, - constants.TASK_TYPE_FINALIZE_INSTANCE_DEPLOYMENT): + elif task_type == constants.TASK_TYPE_FINALIZE_INSTANCE_DEPLOYMENT: # set 'transfer_result' in the 'base_transfer_action' # table if the task returned a result. 
if "transfer_result" in task_info: @@ -2947,33 +2713,33 @@ def _check_other_tasks_running(execution, current_task): "No 'transfer_result' was returned for task type '%s' " "for transfer action '%s'", task_type, execution.action_id) elif task_type in ( - constants.TASK_TYPE_UPDATE_SOURCE_REPLICA, - constants.TASK_TYPE_UPDATE_DESTINATION_REPLICA): + constants.TASK_TYPE_UPDATE_SOURCE_TRANSFER, + constants.TASK_TYPE_UPDATE_DESTINATION_TRANSFER): # NOTE: remember to update the `volumes_info`: # NOTE: considering this method is only called with a lock on the - # `execution.action_id` (in a Replica update tasks' case that's the - # ID of the Replica itself) we can safely call - # `_update_replica_volumes_info` below: - self._update_replica_volumes_info( + # `execution.action_id` (in a Transfer update tasks' case that's + # the ID of the Transfer itself) we can safely call + # `_update_transfer_volumes_info` below: + self._update_transfer_volumes_info( ctxt, execution.action_id, task.instance, {"volumes_info": task_info.get("volumes_info", [])}) - if task_type == constants.TASK_TYPE_UPDATE_DESTINATION_REPLICA: + if task_type == constants.TASK_TYPE_UPDATE_DESTINATION_TRANSFER: # check if this was the last task in the update execution: still_running = _check_other_tasks_running(execution, task) if not still_running: # it means this was the last update task in the Execution - # and we may safely update the params of the Replica + # and we may safely update the params of the Transfer # as they are in the DB: LOG.info( - "All tasks of the '%s' Replica update procedure have " + "All tasks of the '%s' Transfer update procedure have " "completed successfully. 
Setting the updated " - "parameter values on the parent Replica itself.", + "parameter values on the parent Transfer itself.", execution.action_id) - # NOTE: considering all the instances of the Replica get + # NOTE: considering all the instances of the Transfer get # the same params, it doesn't matter which instance's # update task finishes last: - db_api.update_replica( + db_api.update_transfer( ctxt, execution.action_id, task_info) elif task_type in ( @@ -3300,10 +3066,6 @@ def confirm_task_cancellation(self, ctxt, task_id, cancellation_details): "confirmation of its cancellation.", task.id, task.status, final_status) execution = db_api.get_tasks_execution(ctxt, task.execution_id) - if execution.type == constants.EXECUTION_TYPE_MIGRATION: - action = db_api.get_action( - ctxt, execution.action_id, include_task_info=False) - self._check_delete_reservation_for_transfer(action) self._advance_execution_state(ctxt, execution, requery=False) @parent_tasks_execution_synchronized @@ -3406,7 +3168,7 @@ def set_task_error(self, ctxt, task_id, exception_details): "connection info. Original error was: %s" % ( exception_details))) LOG.warn( - "All subtasks for Migration '%s' have been cancelled " + "All subtasks for Deployment '%s' have been cancelled " "to allow for OSMorphing debugging. The connection " "info for the worker VM is: %s", action_id, action.info.get(task.instance, {}).get( @@ -3423,11 +3185,6 @@ def set_task_error(self, ctxt, task_id, exception_details): else: self._cancel_tasks_execution(ctxt, execution) - # NOTE: if this was a migration, make sure to delete - # its associated reservation. 
- if execution.type == constants.EXECUTION_TYPE_MIGRATION: - self._check_delete_reservation_for_transfer(action) - @task_synchronized def add_task_event(self, ctxt, task_id, level, message): LOG.info("Adding event for task '%s': %s", task_id, message) @@ -3467,97 +3224,98 @@ def update_task_progress_update( ctxt, task_id, progress_update_index, new_current_step, new_total_steps=new_total_steps, new_message=new_message) - def _get_replica_schedule(self, ctxt, replica_id, - schedule_id, expired=True): - schedule = db_api.get_replica_schedule( - ctxt, replica_id, schedule_id, expired=expired) + @staticmethod + def _get_transfer_schedule(ctxt, transfer_id, schedule_id, expired=True): + schedule = db_api.get_transfer_schedule( + ctxt, transfer_id, schedule_id, expired=expired) if not schedule: raise exception.NotFound( - "Schedule with ID '%s' for Replica '%s' not found." % ( - schedule_id, replica_id)) + "Schedule with ID '%s' for Transfer '%s' not found." % ( + schedule_id, transfer_id)) return schedule - def create_replica_schedule(self, ctxt, replica_id, - schedule, enabled, exp_date, - shutdown_instance): + def create_transfer_schedule(self, ctxt, transfer_id, + schedule, enabled, exp_date, + shutdown_instance): keystone.create_trust(ctxt) - replica = self._get_replica(ctxt, replica_id) - replica_schedule = models.ReplicaSchedule() - replica_schedule.id = str(uuid.uuid4()) - replica_schedule.replica = replica - replica_schedule.replica_id = replica_id - replica_schedule.schedule = schedule - replica_schedule.expiration_date = exp_date - replica_schedule.enabled = enabled - replica_schedule.shutdown_instance = shutdown_instance - replica_schedule.trust_id = ctxt.trust_id - - db_api.add_replica_schedule( - ctxt, replica_schedule, - lambda ctxt, sched: self._replica_cron_client.register( + transfer = self._get_transfer(ctxt, transfer_id) + transfer_schedule = models.TransferSchedule() + transfer_schedule.id = str(uuid.uuid4()) + transfer_schedule.transfer = transfer + 
transfer_schedule.transfer_id = transfer_id + transfer_schedule.schedule = schedule + transfer_schedule.expiration_date = exp_date + transfer_schedule.enabled = enabled + transfer_schedule.shutdown_instance = shutdown_instance + transfer_schedule.trust_id = ctxt.trust_id + + db_api.add_transfer_schedule( + ctxt, transfer_schedule, + lambda ctxt, sched: self._transfer_cron_client.register( ctxt, sched)) - return self.get_replica_schedule( - ctxt, replica_id, replica_schedule.id) + return self.get_transfer_schedule( + ctxt, transfer_id, transfer_schedule.id) @schedule_synchronized - def update_replica_schedule(self, ctxt, replica_id, schedule_id, - updated_values): - db_api.update_replica_schedule( - ctxt, replica_id, schedule_id, updated_values, None, - lambda ctxt, sched: self._replica_cron_client.register( + def update_transfer_schedule(self, ctxt, transfer_id, schedule_id, + updated_values): + db_api.update_transfer_schedule( + ctxt, transfer_id, schedule_id, updated_values, None, + lambda ctxt, sched: self._transfer_cron_client.register( ctxt, sched)) - return self._get_replica_schedule(ctxt, replica_id, schedule_id) + return self._get_transfer_schedule(ctxt, transfer_id, schedule_id) def _cleanup_schedule_resources(self, ctxt, schedule): - self._replica_cron_client.unregister(ctxt, schedule) + self._transfer_cron_client.unregister(ctxt, schedule) if schedule.trust_id: tmp_trust = context.get_admin_context( trust_id=schedule.trust_id) keystone.delete_trust(tmp_trust) @schedule_synchronized - def delete_replica_schedule(self, ctxt, replica_id, schedule_id): - replica = self._get_replica(ctxt, replica_id) - replica_status = replica.last_execution_status + def delete_transfer_schedule(self, ctxt, transfer_id, schedule_id): + transfer = self._get_transfer(ctxt, transfer_id) + transfer_status = transfer.last_execution_status valid_statuses = list(itertools.chain( constants.FINALIZED_EXECUTION_STATUSES, [constants.EXECUTION_STATUS_UNEXECUTED])) - if replica_status not 
in valid_statuses: - raise exception.InvalidReplicaState( - 'Replica Schedule cannot be deleted while the Replica is in ' - '%s state. Please wait for the Replica execution to finish' % - (replica_status)) - db_api.delete_replica_schedule( - ctxt, replica_id, schedule_id, None, + if transfer_status not in valid_statuses: + raise exception.InvalidTransferState( + 'Transfer Schedule cannot be deleted while the Transfer is in ' + '%s state. Please wait for the Transfer execution to finish' % + (transfer_status)) + db_api.delete_transfer_schedule( + ctxt, transfer_id, schedule_id, None, lambda ctxt, sched: self._cleanup_schedule_resources( ctxt, sched)) - @replica_synchronized - def get_replica_schedules(self, ctxt, replica_id=None, expired=True): - return db_api.get_replica_schedules( - ctxt, replica_id=replica_id, expired=expired) + @transfer_synchronized + def get_transfer_schedules(self, ctxt, transfer_id=None, expired=True): + return db_api.get_transfer_schedules( + ctxt, transfer_id=transfer_id, expired=expired) @schedule_synchronized - def get_replica_schedule(self, ctxt, replica_id, - schedule_id, expired=True): - return self._get_replica_schedule( - ctxt, replica_id, schedule_id, expired=expired) + def get_transfer_schedule(self, ctxt, transfer_id, + schedule_id, expired=True): + return self._get_transfer_schedule( + ctxt, transfer_id, schedule_id, expired=expired) - @replica_synchronized - def update_replica( - self, ctxt, replica_id, updated_properties): - replica = self._get_replica(ctxt, replica_id, include_task_info=True) + @transfer_synchronized + def update_transfer( + self, ctxt, transfer_id, updated_properties): + transfer = self._get_transfer( + ctxt, transfer_id, include_task_info=True) minion_pool_fields = [ "origin_minion_pool_id", "destination_minion_pool_id", "instance_osmorphing_minion_pool_mappings"] if any([mpf in updated_properties for mpf in minion_pool_fields]): - # NOTE: this is just a dummy Replica model to use for validation: - dummy = 
models.Replica() - dummy.id = replica.id - dummy.instances = replica.instances - dummy.origin_endpoint_id = replica.origin_endpoint_id - dummy.destination_endpoint_id = replica.destination_endpoint_id + # NOTE: this is just a dummy Transfer model to use for validation: + dummy = models.Transfer() + dummy.id = transfer.id + dummy.instances = transfer.instances + dummy.origin_endpoint_id = transfer.origin_endpoint_id + dummy.destination_endpoint_id = transfer.destination_endpoint_id dummy.origin_minion_pool_id = updated_properties.get( 'origin_minion_pool_id') dummy.destination_minion_pool_id = updated_properties.get( @@ -3567,33 +3325,33 @@ def update_replica( 'instance_osmorphing_minion_pool_mappings')) self._check_minion_pools_for_action(ctxt, dummy) - self._check_replica_running_executions(ctxt, replica) - self._check_valid_replica_tasks_execution(replica, force=True) + self._check_transfer_running_executions(ctxt, transfer) + self._check_valid_transfer_tasks_execution(transfer, force=True) if updated_properties.get('user_scripts'): - replica.user_scripts = updated_properties['user_scripts'] + transfer.user_scripts = updated_properties['user_scripts'] execution = models.TasksExecution() execution.id = str(uuid.uuid4()) execution.status = constants.EXECUTION_STATUS_UNEXECUTED - execution.action = replica - execution.type = constants.EXECUTION_TYPE_REPLICA_UPDATE + execution.action = transfer + execution.type = constants.EXECUTION_TYPE_TRANSFER_UPDATE - for instance in replica.instances: + for instance in transfer.instances: LOG.debug( - "Pre-replica-update task_info for instance '%s' of Replica " - "'%s': %s", instance, replica_id, + "Pre-transfer-update task_info for instance '%s' of Transfer " + "'%s': %s", instance, transfer_id, utils.sanitize_task_info( - replica.info[instance])) + transfer.info[instance])) # NOTE: "circular assignment" would lead to a `None` value # so we must operate on a copy: - inst_info_copy = copy.deepcopy(replica.info[instance]) + 
inst_info_copy = copy.deepcopy(transfer.info[instance]) # NOTE: we update the various values in the task info itself # As a result, the values within the task_info will be the updated # values which will be checked. The old values will be sent to the # tasks through the origin/destination parameters for them to be # compared to the new ones. - # The actual values on the Replica object itself will be set + # The actual values on the Transfer object itself will be set # during _handle_post_task_actions once the final destination-side # update task will be completed. inst_info_copy.update({ @@ -3605,45 +3363,45 @@ def update_replica( if "destination_environment" in updated_properties: inst_info_copy["target_environment"] = updated_properties[ "destination_environment"] - replica.info[instance] = inst_info_copy + transfer.info[instance] = inst_info_copy LOG.debug( - "Updated task_info for instance '%s' of Replica " + "Updated task_info for instance '%s' of Transfer " "'%s' which will be verified during update procedure: %s", - instance, replica_id, utils.sanitize_task_info( - replica.info[instance])) + instance, transfer_id, utils.sanitize_task_info( + transfer.info[instance])) get_instance_info_task = self._create_task( instance, constants.TASK_TYPE_GET_INSTANCE_INFO, execution) - update_source_replica_task = self._create_task( - instance, constants.TASK_TYPE_UPDATE_SOURCE_REPLICA, + update_source_transfer_task = self._create_task( + instance, constants.TASK_TYPE_UPDATE_SOURCE_TRANSFER, execution) self._create_task( - instance, constants.TASK_TYPE_UPDATE_DESTINATION_REPLICA, + instance, constants.TASK_TYPE_UPDATE_DESTINATION_TRANSFER, execution, depends_on=[ get_instance_info_task.id, # NOTE: the dest-side update task must be done after # the source-side one as both can potentially modify # the 'volumes_info' together: - update_source_replica_task.id]) + update_source_transfer_task.id]) - self._check_execution_tasks_sanity(execution, replica.info) + 
self._check_execution_tasks_sanity(execution, transfer.info) - # update the action info for all of the instances in the Replica: + # update the action info for all of the instances in the Transfer: for instance in execution.action.instances: db_api.update_transfer_action_info_for_instance( - ctxt, replica.id, instance, replica.info[instance]) + ctxt, transfer.id, instance, transfer.info[instance]) - db_api.add_replica_tasks_execution(ctxt, execution) - LOG.debug("Execution for Replica update tasks created: %s", + db_api.add_transfer_tasks_execution(ctxt, execution) + LOG.debug("Execution for Transfer update tasks created: %s", execution.id) - self._begin_tasks(ctxt, replica, execution) + self._begin_tasks(ctxt, transfer, execution) - return self.get_replica_tasks_execution( - ctxt, replica_id, execution.id) + return self.get_transfer_tasks_execution( + ctxt, transfer_id, execution.id) def get_diagnostics(self, ctxt): diagnostics = utils.get_diagnostics_info() diff --git a/coriolis/constants.py b/coriolis/constants.py index 8d1ec2b5f..2bfbf6e30 100644 --- a/coriolis/constants.py +++ b/coriolis/constants.py @@ -3,6 +3,9 @@ DEFAULT_CORIOLIS_REGION_NAME = "Default Region" +TRANSFER_SCENARIO_REPLICA = "replica" +TRANSFER_SCENARIO_LIVE_MIGRATION = "live_migration" + EXECUTION_STATUS_UNEXECUTED = "UNEXECUTED" EXECUTION_STATUS_RUNNING = "RUNNING" EXECUTION_STATUS_COMPLETED = "COMPLETED" @@ -79,62 +82,37 @@ TASK_STATUS_FAILED_TO_CANCEL ] -TASK_TYPE_DEPLOY_MIGRATION_SOURCE_RESOURCES = ( - "DEPLOY_MIGRATION_SOURCE_RESOURCES") -TASK_TYPE_DEPLOY_MIGRATION_TARGET_RESOURCES = ( - "DEPLOY_MIGRATION_TARGET_RESOURCES") -TASK_TYPE_DELETE_MIGRATION_SOURCE_RESOURCES = ( - "DELETE_MIGRATION_SOURCE_RESOURCES") -TASK_TYPE_DELETE_MIGRATION_TARGET_RESOURCES = ( - "DELETE_MIGRATION_TARGET_RESOURCES") -TASK_TYPE_DEPLOY_INSTANCE_RESOURCES = "DEPLOY_INSTANCE_RESOURCES" TASK_TYPE_FINALIZE_INSTANCE_DEPLOYMENT = "FINALIZE_INSTANCE_DEPLOYMENT" TASK_TYPE_CLEANUP_FAILED_INSTANCE_DEPLOYMENT = ( 
"CLEANUP_FAILED_INSTANCE_DEPLOYMENT") -TASK_TYPE_CLEANUP_INSTANCE_SOURCE_STORAGE = ( - "CLEANUP_INSTANCE_SOURCE_STORAGE") -TASK_TYPE_CLEANUP_INSTANCE_TARGET_STORAGE = ( - "CLEANUP_INSTANCE_TARGET_STORAGE") - -TASK_TYPE_CREATE_INSTANCE_DISKS = "CREATE_INSTANCE_DISKS" TASK_TYPE_DEPLOY_OS_MORPHING_RESOURCES = "DEPLOY_OS_MORPHING_RESOURCES" TASK_TYPE_OS_MORPHING = "OS_MORPHING" TASK_TYPE_DELETE_OS_MORPHING_RESOURCES = "DELETE_OS_MORPHING_RESOURCES" TASK_TYPE_GET_INSTANCE_INFO = "GET_INSTANCE_INFO" -TASK_TYPE_DEPLOY_REPLICA_DISKS = "DEPLOY_REPLICA_DISKS" -TASK_TYPE_DELETE_REPLICA_SOURCE_DISK_SNAPSHOTS = ( - "DELETE_REPLICA_SOURCE_DISK_SNAPSHOTS") -TASK_TYPE_DELETE_REPLICA_DISKS = "DELETE_REPLICA_DISKS" +TASK_TYPE_DEPLOY_TRANSFER_DISKS = "DEPLOY_TRANSFER_DISKS" +TASK_TYPE_DELETE_TRANSFER_SOURCE_DISK_SNAPSHOTS = ( + "DELETE_TRANSFER_SOURCE_DISK_SNAPSHOTS") +TASK_TYPE_DELETE_TRANSFER_DISKS = "DELETE_TRANSFER_DISKS" TASK_TYPE_REPLICATE_DISKS = "REPLICATE_DISKS" -TASK_TYPE_DEPLOY_REPLICA_SOURCE_RESOURCES = "DEPLOY_REPLICA_SOURCE_RESOURCES" -TASK_TYPE_DELETE_REPLICA_SOURCE_RESOURCES = "DELETE_REPLICA_SOURCE_RESOURCES" -TASK_TYPE_DEPLOY_REPLICA_TARGET_RESOURCES = "DEPLOY_REPLICA_TARGET_RESOURCES" -TASK_TYPE_DELETE_REPLICA_TARGET_RESOURCES = "DELETE_REPLICA_TARGET_RESOURCES" +TASK_TYPE_DEPLOY_TRANSFER_SOURCE_RESOURCES = "DEPLOY_TRANSFER_SOURCE_RESOURCES" +TASK_TYPE_DELETE_TRANSFER_SOURCE_RESOURCES = "DELETE_TRANSFER_SOURCE_RESOURCES" +TASK_TYPE_DEPLOY_TRANSFER_TARGET_RESOURCES = "DEPLOY_TRANSFER_TARGET_RESOURCES" +TASK_TYPE_DELETE_TRANSFER_TARGET_RESOURCES = "DELETE_TRANSFER_TARGET_RESOURCES" TASK_TYPE_SHUTDOWN_INSTANCE = "SHUTDOWN_INSTANCE" -TASK_TYPE_DEPLOY_REPLICA_INSTANCE_RESOURCES = ( - "DEPLOY_REPLICA_INSTANCE_RESOURCES") -TASK_TYPE_FINALIZE_REPLICA_INSTANCE_DEPLOYMENT = ( - "FINALIZE_REPLICA_INSTANCE_DEPLOYMENT") -TASK_TYPE_CLEANUP_FAILED_REPLICA_INSTANCE_DEPLOYMENT = ( - "CLEANUP_FAILED_REPLICA_INSTANCE_DEPLOYMENT") -TASK_TYPE_CREATE_REPLICA_DISK_SNAPSHOTS = 
"CREATE_REPLICA_DISK_SNAPSHOTS" -TASK_TYPE_DELETE_REPLICA_TARGET_DISK_SNAPSHOTS = ( - "DELETE_REPLICA_TARGET_DISK_SNAPSHOTS") -TASK_TYPE_RESTORE_REPLICA_DISK_SNAPSHOTS = "RESTORE_REPLICA_DISK_SNAPSHOTS" +TASK_TYPE_DEPLOY_INSTANCE_RESOURCES = "DEPLOY_INSTANCE_RESOURCES" +TASK_TYPE_CREATE_TRANSFER_DISK_SNAPSHOTS = "CREATE_TRANSFER_DISK_SNAPSHOTS" +TASK_TYPE_DELETE_TRANSFER_TARGET_DISK_SNAPSHOTS = ( + "DELETE_TRANSFER_TARGET_DISK_SNAPSHOTS") +TASK_TYPE_RESTORE_TRANSFER_DISK_SNAPSHOTS = "RESTORE_TRANSFER_DISK_SNAPSHOTS" TASK_TYPE_GET_OPTIMAL_FLAVOR = "GET_OPTIMAL_FLAVOR" -TASK_TYPE_VALIDATE_MIGRATION_SOURCE_INPUTS = ( - "VALIDATE_MIGRATION_SOURCE_INPUTS") -TASK_TYPE_VALIDATE_MIGRATION_DESTINATION_INPUTS = ( - "VALIDATE_MIGRATION_DESTINATION_INPUTS") -TASK_TYPE_VALIDATE_REPLICA_SOURCE_INPUTS = "VALIDATE_REPLICA_SOURCE_INPUTS" -TASK_TYPE_VALIDATE_REPLICA_DESTINATION_INPUTS = ( - "VALIDATE_REPLICA_DESTINATION_INPUTS") -TASK_TYPE_VALIDATE_REPLICA_DEPLOYMENT_INPUTS = ( - "VALIDATE_REPLICA_DEPLOYMENT_INPUTS") -TASK_TYPE_UPDATE_SOURCE_REPLICA = "UPDATE_SOURCE_REPLICA" -TASK_TYPE_UPDATE_DESTINATION_REPLICA = "UPDATE_DESTINATION_REPLICA" +TASK_TYPE_VALIDATE_TRANSFER_SOURCE_INPUTS = "VALIDATE_TRANSFER_SOURCE_INPUTS" +TASK_TYPE_VALIDATE_TRANSFER_DESTINATION_INPUTS = ( + "VALIDATE_TRANSFER_DESTINATION_INPUTS") +TASK_TYPE_VALIDATE_DEPLOYMENT_INPUTS = "VALIDATE_DEPLOYMENT_INPUTS" +TASK_TYPE_UPDATE_SOURCE_TRANSFER = "UPDATE_SOURCE_TRANSFER" +TASK_TYPE_UPDATE_DESTINATION_TRANSFER = "UPDATE_DESTINATION_TRANSFER" TASK_TYPE_VALIDATE_SOURCE_MINION_POOL_OPTIONS = ( "VALIDATE_SOURCE_MINION_POOL_ENVIRONMENT_OPTIONS") @@ -211,8 +189,8 @@ PROVIDER_TYPE_IMPORT = 1 PROVIDER_TYPE_EXPORT = 2 -PROVIDER_TYPE_REPLICA_IMPORT = 4 -PROVIDER_TYPE_REPLICA_EXPORT = 8 +PROVIDER_TYPE_TRANSFER_IMPORT = 4 +PROVIDER_TYPE_TRANSFER_EXPORT = 8 PROVIDER_TYPE_ENDPOINT = 16 PROVIDER_TYPE_ENDPOINT_INSTANCES = 32 PROVIDER_TYPE_OS_MORPHING = 64 @@ -220,16 +198,18 @@ PROVIDER_TYPE_INSTANCE_FLAVOR = 256 
PROVIDER_TYPE_DESTINATION_ENDPOINT_OPTIONS = 512 PROVIDER_TYPE_SETUP_LIBS = 1024 -PROVIDER_TYPE_VALIDATE_MIGRATION_EXPORT = 2048 -PROVIDER_TYPE_VALIDATE_REPLICA_EXPORT = 4096 -PROVIDER_TYPE_VALIDATE_MIGRATION_IMPORT = 8192 -PROVIDER_TYPE_VALIDATE_REPLICA_IMPORT = 16384 +PROVIDER_TYPE_VALIDATE_TRANSFER_EXPORT = 4096 +PROVIDER_TYPE_VALIDATE_TRANSFER_IMPORT = 16384 PROVIDER_TYPE_ENDPOINT_STORAGE = 32768 -PROVIDER_TYPE_SOURCE_REPLICA_UPDATE = 65536 +PROVIDER_TYPE_SOURCE_TRANSFER_UPDATE = 65536 PROVIDER_TYPE_SOURCE_ENDPOINT_OPTIONS = 131072 -PROVIDER_TYPE_DESTINATION_REPLICA_UPDATE = 262144 +PROVIDER_TYPE_DESTINATION_TRANSFER_UPDATE = 262144 PROVIDER_TYPE_SOURCE_MINION_POOL = 524288 PROVIDER_TYPE_DESTINATION_MINION_POOL = 1048576 +# NOTE(dvincze): These are deprecated, we should remove them, +# and de-increment the rest +PROVIDER_TYPE_VALIDATE_MIGRATION_EXPORT = 2048 +PROVIDER_TYPE_VALIDATE_MIGRATION_IMPORT = 8192 DISK_FORMAT_VMDK = 'vmdk' DISK_FORMAT_RAW = 'raw' @@ -281,14 +261,13 @@ COMPRESSION_FORMAT_ZLIB ] -TRANSFER_ACTION_TYPE_MIGRATION = "migration" -TRANSFER_ACTION_TYPE_REPLICA = "replica" +TRANSFER_ACTION_TYPE_DEPLOYMENT = "deployment" +TRANSFER_ACTION_TYPE_TRANSFER = "transfer" -EXECUTION_TYPE_REPLICA_EXECUTION = "replica_execution" -EXECUTION_TYPE_REPLICA_DISKS_DELETE = "replica_disks_delete" -EXECUTION_TYPE_REPLICA_DEPLOY = "replica_deploy" -EXECUTION_TYPE_MIGRATION = "migration" -EXECUTION_TYPE_REPLICA_UPDATE = "replica_update" +EXECUTION_TYPE_TRANSFER_EXECUTION = "transfer_execution" +EXECUTION_TYPE_TRANSFER_DISKS_DELETE = "transfer_disks_delete" +EXECUTION_TYPE_DEPLOYMENT = "deployment" +EXECUTION_TYPE_TRANSFER_UPDATE = "transfer_update" EXECUTION_TYPE_MINION_POOL_MAINTENANCE = "minion_pool_maintenance" EXECUTION_TYPE_MINION_POOL_UPDATE = "minion_pool_update" EXECUTION_TYPE_MINION_POOL_SET_UP_SHARED_RESOURCES = ( @@ -303,8 +282,8 @@ TASKFLOW_LOCK_NAME_FORMAT = "taskflow-%s" EXECUTION_LOCK_NAME_FORMAT = "execution-%s" ENDPOINT_LOCK_NAME_FORMAT = 
"endpoint-%s" -MIGRATION_LOCK_NAME_FORMAT = "migration-%s" -REPLICA_LOCK_NAME_FORMAT = "replica-%s" +DEPLOYMENT_LOCK_NAME_FORMAT = "deployment-%s" +TRANSFER_LOCK_NAME_FORMAT = "transfer-%s" SCHEDULE_LOCK_NAME_FORMAT = "schedule-%s" REGION_LOCK_NAME_FORMAT = "region-%s" SERVICE_LOCK_NAME_FORMAT = "service-%s" @@ -312,11 +291,10 @@ MINION_MACHINE_LOCK_NAME_FORMAT = "minion-pool-%s-machine-%s" EXECUTION_TYPE_TO_ACTION_LOCK_NAME_FORMAT_MAP = { - EXECUTION_TYPE_MIGRATION: MIGRATION_LOCK_NAME_FORMAT, - EXECUTION_TYPE_REPLICA_EXECUTION: REPLICA_LOCK_NAME_FORMAT, - EXECUTION_TYPE_REPLICA_DEPLOY: REPLICA_LOCK_NAME_FORMAT, - EXECUTION_TYPE_REPLICA_UPDATE: REPLICA_LOCK_NAME_FORMAT, - EXECUTION_TYPE_REPLICA_DISKS_DELETE: REPLICA_LOCK_NAME_FORMAT, + EXECUTION_TYPE_TRANSFER_EXECUTION: TRANSFER_LOCK_NAME_FORMAT, + EXECUTION_TYPE_DEPLOYMENT: TRANSFER_LOCK_NAME_FORMAT, + EXECUTION_TYPE_TRANSFER_UPDATE: TRANSFER_LOCK_NAME_FORMAT, + EXECUTION_TYPE_TRANSFER_DISKS_DELETE: TRANSFER_LOCK_NAME_FORMAT, EXECUTION_TYPE_MINION_POOL_MAINTENANCE: MINION_POOL_LOCK_NAME_FORMAT, EXECUTION_TYPE_MINION_POOL_UPDATE: MINION_POOL_LOCK_NAME_FORMAT, EXECUTION_TYPE_MINION_POOL_SET_UP_SHARED_RESOURCES: ( @@ -335,7 +313,7 @@ CONDUCTOR_MAIN_MESSAGING_TOPIC = "coriolis_conductor" WORKER_MAIN_MESSAGING_TOPIC = "coriolis_worker" SCHEDULER_MAIN_MESSAGING_TOPIC = "coriolis_scheduler" -REPLICA_CRON_MAIN_MESSAGING_TOPIC = "coriolis_replica_cron_worker" +TRANSFER_CRON_MAIN_MESSAGING_TOPIC = "coriolis_transfer_cron_worker" MINION_MANAGER_MAIN_MESSAGING_TOPIC = "coriolis_minion_manager" MINION_POOL_MACHINE_RETENTION_STRATEGY_DELETE = "delete" diff --git a/coriolis/db/api.py b/coriolis/db/api.py index 76c6d72e5..cd5111e77 100644 --- a/coriolis/db/api.py +++ b/coriolis/db/api.py @@ -98,26 +98,26 @@ def _update_sqlalchemy_object_fields( "of type '%s': %s" % (type(obj), values_to_update.keys())) -def _get_replica_schedules_filter(context, replica_id=None, - schedule_id=None, expired=True): +def 
_get_transfer_schedules_filter(context, transfer_id=None, + schedule_id=None, expired=True): now = timeutils.utcnow() - q = _soft_delete_aware_query(context, models.ReplicaSchedule) - q = q.join(models.Replica) + q = _soft_delete_aware_query(context, models.TransferSchedule) + q = q.join(models.Transfer) sched_filter = q.filter() if is_user_context(context): sched_filter = sched_filter.filter( - models.Replica.project_id == context.project_id) + models.Transfer.project_id == context.project_id) - if replica_id: + if transfer_id: sched_filter = sched_filter.filter( - models.Replica.id == replica_id) + models.Transfer.id == transfer_id) if schedule_id: sched_filter = sched_filter.filter( - models.ReplicaSchedule.id == schedule_id) + models.TransferSchedule.id == schedule_id) if not expired: sched_filter = sched_filter.filter( - or_(models.ReplicaSchedule.expiration_date == null(), - models.ReplicaSchedule.expiration_date > now)) + or_(models.TransferSchedule.expiration_date == null(), + models.TransferSchedule.expiration_date > now)) return sched_filter @@ -274,37 +274,37 @@ def delete_endpoint(context, endpoint_id): @enginefacade.reader -def get_replica_tasks_executions(context, replica_id, include_tasks=False, - include_task_info=False, to_dict=False): +def get_transfer_tasks_executions(context, transfer_id, include_tasks=False, + include_task_info=False, to_dict=False): q = _soft_delete_aware_query(context, models.TasksExecution) - q = q.join(models.Replica) + q = q.join(models.Transfer) if include_task_info: q = q.options(orm.joinedload('action').undefer('info')) if include_tasks: q = _get_tasks_with_details_options(q) if is_user_context(context): - q = q.filter(models.Replica.project_id == context.project_id) + q = q.filter(models.Transfer.project_id == context.project_id) db_result = q.filter( - models.Replica.id == replica_id).all() + models.Transfer.id == transfer_id).all() if to_dict: return [e.to_dict() for e in db_result] return db_result 
@enginefacade.reader -def get_replica_tasks_execution(context, replica_id, execution_id, - include_task_info=False, to_dict=False): +def get_transfer_tasks_execution(context, transfer_id, execution_id, + include_task_info=False, to_dict=False): q = _soft_delete_aware_query(context, models.TasksExecution).join( - models.Replica) + models.Transfer) if include_task_info: q = q.options(orm.joinedload('action').undefer('info')) q = _get_tasks_with_details_options(q) if is_user_context(context): - q = q.filter(models.Replica.project_id == context.project_id) + q = q.filter(models.Transfer.project_id == context.project_id) db_result = q.filter( - models.Replica.id == replica_id, + models.Transfer.id == transfer_id, models.TasksExecution.id == execution_id).first() if to_dict and db_result is not None: return db_result.to_dict() @@ -312,7 +312,7 @@ def get_replica_tasks_execution(context, replica_id, execution_id, @enginefacade.writer -def add_replica_tasks_execution(context, execution): +def add_transfer_tasks_execution(context, execution): if is_user_context(context): if execution.action.project_id != context.project_id: raise exception.NotAuthorized() @@ -330,12 +330,12 @@ def add_replica_tasks_execution(context, execution): @enginefacade.writer -def delete_replica_tasks_execution(context, execution_id): +def delete_transfer_tasks_execution(context, execution_id): q = _soft_delete_aware_query(context, models.TasksExecution).filter( models.TasksExecution.id == execution_id) if is_user_context(context): - if not q.join(models.Replica).filter( - models.Replica.project_id == context.project_id).first(): + if not q.join(models.Transfer).filter( + models.Transfer.project_id == context.project_id).first(): raise exception.NotAuthorized() count = q.soft_delete() if count == 0: @@ -343,28 +343,28 @@ def delete_replica_tasks_execution(context, execution_id): @enginefacade.reader -def get_replica_schedules(context, replica_id=None, expired=True): - sched_filter = 
_get_replica_schedules_filter( - context, replica_id=replica_id, expired=expired) +def get_transfer_schedules(context, transfer_id=None, expired=True): + sched_filter = _get_transfer_schedules_filter( + context, transfer_id=transfer_id, expired=expired) return sched_filter.all() @enginefacade.reader -def get_replica_schedule(context, replica_id, schedule_id, expired=True): - sched_filter = _get_replica_schedules_filter( - context, replica_id=replica_id, schedule_id=schedule_id, +def get_transfer_schedule(context, transfer_id, schedule_id, expired=True): + sched_filter = _get_transfer_schedules_filter( + context, transfer_id=transfer_id, schedule_id=schedule_id, expired=expired) return sched_filter.first() @enginefacade.writer -def update_replica_schedule(context, replica_id, schedule_id, - updated_values, pre_update_callable=None, - post_update_callable=None): +def update_transfer_schedule(context, transfer_id, schedule_id, + updated_values, pre_update_callable=None, + post_update_callable=None): # NOTE(gsamfira): we need to refactor the DB layer a bit to allow # two-phase transactions or at least allow running these functions # inside a single transaction block. - schedule = get_replica_schedule(context, replica_id, schedule_id) + schedule = get_transfer_schedule(context, transfer_id, schedule_id) if pre_update_callable: pre_update_callable(schedule=schedule) for val in ["schedule", "expiration_date", "enabled", "shutdown_instance"]: @@ -378,23 +378,23 @@ def update_replica_schedule(context, replica_id, schedule_id, @enginefacade.writer -def delete_replica_schedule(context, replica_id, - schedule_id, pre_delete_callable=None, - post_delete_callable=None): +def delete_transfer_schedule(context, transfer_id, + schedule_id, pre_delete_callable=None, + post_delete_callable=None): # NOTE(gsamfira): we need to refactor the DB layer a bit to allow # two-phase transactions or at least allow running these functions # inside a single transaction block. 
- q = _soft_delete_aware_query(context, models.ReplicaSchedule).filter( - models.ReplicaSchedule.id == schedule_id, - models.ReplicaSchedule.replica_id == replica_id) + q = _soft_delete_aware_query(context, models.TransferSchedule).filter( + models.TransferSchedule.id == schedule_id, + models.TransferSchedule.transfer_id == transfer_id) schedule = q.first() if not schedule: raise exception.NotFound( "No such schedule") if is_user_context(context): - if not q.join(models.Replica).filter( - models.Replica.project_id == context.project_id).first(): + if not q.join(models.Transfer).filter( + models.Transfer.project_id == context.project_id).first(): raise exception.NotAuthorized() if pre_delete_callable: pre_delete_callable(context, schedule) @@ -406,36 +406,39 @@ def delete_replica_schedule(context, replica_id, @enginefacade.writer -def add_replica_schedule(context, schedule, post_create_callable=None): +def add_transfer_schedule(context, schedule, post_create_callable=None): # NOTE(gsamfira): we need to refactor the DB layer a bit to allow # two-phase transactions or at least allow running these functions # inside a single transaction block. 
- if schedule.replica.project_id != context.project_id: + if schedule.transfer.project_id != context.project_id: raise exception.NotAuthorized() _session(context).add(schedule) if post_create_callable: post_create_callable(context, schedule) -def _get_replica_with_tasks_executions_options(q): - return q.options(orm.joinedload(models.Replica.executions)) +def _get_transfer_with_tasks_executions_options(q): + return q.options(orm.joinedload(models.Transfer.executions)) @enginefacade.reader -def get_replicas(context, - include_tasks_executions=False, - include_task_info=False, - to_dict=False): - q = _soft_delete_aware_query(context, models.Replica) +def get_transfers(context, + transfer_scenario=None, + include_tasks_executions=False, + include_task_info=False, + to_dict=False): + q = _soft_delete_aware_query(context, models.Transfer) if include_tasks_executions: - q = _get_replica_with_tasks_executions_options(q) + q = _get_transfer_with_tasks_executions_options(q) if include_task_info: q = q.options(orm.undefer('info')) q = q.filter() + if transfer_scenario: + q = q.filter(models.Transfer.scenario == transfer_scenario) if is_user_context(context): q = q.filter( - models.Replica.project_id == context.project_id) + models.Transfer.project_id == context.project_id) db_result = q.all() if to_dict: return [ @@ -447,41 +450,55 @@ def get_replicas(context, @enginefacade.reader -def get_replica(context, replica_id, include_task_info=False, to_dict=False): - q = _soft_delete_aware_query(context, models.Replica) - q = _get_replica_with_tasks_executions_options(q) +def get_transfer(context, transfer_id, + transfer_scenario=None, + include_task_info=False, + to_dict=False): + q = _soft_delete_aware_query(context, models.Transfer) + q = _get_transfer_with_tasks_executions_options(q) if include_task_info: q = q.options(orm.undefer('info')) + if transfer_scenario: + q = q.filter( + models.Transfer.scenario == transfer_scenario) if is_user_context(context): q = q.filter( - 
models.Replica.project_id == context.project_id) + models.Transfer.project_id == context.project_id) - replica = q.filter( - models.Replica.id == replica_id).first() - if to_dict and replica is not None: - return replica.to_dict(include_task_info=include_task_info) + transfer = q.filter( + models.Transfer.id == transfer_id).first() + if to_dict and transfer is not None: + return transfer.to_dict(include_task_info=include_task_info) - return replica + return transfer @enginefacade.reader -def get_endpoint_replicas_count(context, endpoint_id): +def get_endpoint_transfers_count( + context, endpoint_id, transfer_scenario=None): + + scenario_filter_kwargs = {} + if transfer_scenario: + scenario_filter_kwargs = {"scenario": transfer_scenario} + origin_args = {'origin_endpoint_id': endpoint_id} + origin_args.update(scenario_filter_kwargs) q_origin_count = _soft_delete_aware_query( - context, models.Replica).filter_by(**origin_args).count() + context, models.Transfer).filter_by(**origin_args).count() destination_args = {'destination_endpoint_id': endpoint_id} + destination_args.update(scenario_filter_kwargs) q_destination_count = _soft_delete_aware_query( - context, models.Replica).filter_by(**destination_args).count() + context, models.Transfer).filter_by(**destination_args).count() return q_origin_count + q_destination_count @enginefacade.writer -def add_replica(context, replica): - replica.user_id = context.user - replica.project_id = context.project_id - _session(context).add(replica) +def add_transfer(context, transfer): + transfer.user_id = context.user + transfer.project_id = context.project_id + _session(context).add(transfer) @enginefacade.writer @@ -499,28 +516,30 @@ def _delete_transfer_action(context, cls, id): @enginefacade.writer -def delete_replica(context, replica_id): - _delete_transfer_action(context, models.Replica, replica_id) +def delete_transfer(context, transfer_id): + _delete_transfer_action(context, models.Transfer, transfer_id) 
@enginefacade.reader -def get_replica_migrations(context, replica_id): - q = _soft_delete_aware_query(context, models.Migration) - q = q.join("replica") +def get_transfer_deployments(context, transfer_id): + q = _soft_delete_aware_query(context, models.Deployment) + q = q.join("transfer") q = q.options(orm.joinedload("executions")) if is_user_context(context): q = q.filter( - models.Migration.project_id == context.project_id) + models.Deployment.project_id == context.project_id) return q.filter( - models.Replica.id == replica_id).all() + models.Transfer.id == transfer_id).all() @enginefacade.reader -def get_migrations(context, include_tasks=False, - include_task_info=False, to_dict=False): - q = _soft_delete_aware_query(context, models.Migration) +def get_deployments(context, + include_tasks=False, + include_task_info=False, + to_dict=False): + q = _soft_delete_aware_query(context, models.Deployment) if include_tasks: - q = _get_migration_task_query_options(q) + q = _get_deployment_task_query_options(q) else: q = q.options(orm.joinedload("executions")) if include_task_info: @@ -546,7 +565,7 @@ def _get_tasks_with_details_options(query): joinedload("events")) -def _get_migration_task_query_options(query): +def _get_deployment_task_query_options(query): return query.options( orm.joinedload("executions"). joinedload("tasks"). 
@@ -559,13 +578,13 @@ def _get_migration_task_query_options(query): @enginefacade.reader -def get_migration(context, migration_id, include_task_info=False, - to_dict=False): - q = _soft_delete_aware_query(context, models.Migration) - q = _get_migration_task_query_options(q) +def get_deployment(context, deployment_id, include_task_info=False, + to_dict=False): + q = _soft_delete_aware_query(context, models.Deployment) + q = _get_deployment_task_query_options(q) if include_task_info: q = q.options(orm.undefer('info')) - args = {"id": migration_id} + args = {"id": deployment_id} if is_user_context(context): args["project_id"] = context.project_id db_result = q.filter_by(**args).first() @@ -576,15 +595,15 @@ def get_migration(context, migration_id, include_task_info=False, @enginefacade.writer -def add_migration(context, migration): - migration.user_id = context.user - migration.project_id = context.project_id - _session(context).add(migration) +def add_deployment(context, deployment): + deployment.user_id = context.user + deployment.project_id = context.project_id + _session(context).add(deployment) @enginefacade.writer -def delete_migration(context, migration_id): - _delete_transfer_action(context, models.Migration, migration_id) +def delete_deployment(context, deployment_id): + _delete_transfer_action(context, models.Deployment, deployment_id) @enginefacade.writer @@ -918,10 +937,10 @@ def update_task_progress_update( @enginefacade.writer -def update_replica(context, replica_id, updated_values): - replica = get_replica(context, replica_id) - if not replica: - raise exception.NotFound("Replica not found") +def update_transfer(context, transfer_id, updated_values): + transfer = get_transfer(context, transfer_id) + if not transfer: + raise exception.NotFound("Transfer not found") mapped_info_fields = { 'destination_environment': 'target_environment'} @@ -934,11 +953,11 @@ def update_replica(context, replica_id, updated_values): for field in updateable_fields: if 
mapped_info_fields.get(field, field) in updated_values: LOG.debug( - "Updating the '%s' field of Replica '%s' to: '%s'", - field, replica_id, updated_values[ + "Updating the '%s' field of Transfer '%s' to: '%s'", + field, transfer_id, updated_values[ mapped_info_fields.get(field, field)]) setattr( - replica, field, + transfer, field, updated_values[mapped_info_fields.get(field, field)]) non_updateable_fields = set( @@ -947,12 +966,12 @@ def update_replica(context, replica_id, updated_values): for field in updateable_fields}) if non_updateable_fields: LOG.warn( - "The following Replica fields can NOT be updated: %s", + "The following Transfer fields can NOT be updated: %s", non_updateable_fields) # the oslo_db library uses this method for both the `created_at` and # `updated_at` fields - setattr(replica, 'updated_at', timeutils.utcnow()) + setattr(transfer, 'updated_at', timeutils.utcnow()) @enginefacade.writer diff --git a/coriolis/db/sqlalchemy/migrate_repo/versions/019_add_replica_scenario_field.py b/coriolis/db/sqlalchemy/migrate_repo/versions/019_add_replica_scenario_field.py new file mode 100644 index 000000000..e49d361c4 --- /dev/null +++ b/coriolis/db/sqlalchemy/migrate_repo/versions/019_add_replica_scenario_field.py @@ -0,0 +1,18 @@ +# Copyright 2024 Cloudbase Solutions Srl +# All Rights Reserved. 
+ +import sqlalchemy + + +def upgrade(migrate_engine): + meta = sqlalchemy.MetaData() + meta.bind = migrate_engine + + replica = sqlalchemy.Table( + 'replica', meta, autoload=True) + + replica_scenario = sqlalchemy.Column( + "scenario", sqlalchemy.String(255), nullable=False, + default="replica") + + replica.create_column(replica_scenario) diff --git a/coriolis/db/sqlalchemy/migrate_repo/versions/020_rename_tables.py b/coriolis/db/sqlalchemy/migrate_repo/versions/020_rename_tables.py new file mode 100644 index 000000000..cc5e259ef --- /dev/null +++ b/coriolis/db/sqlalchemy/migrate_repo/versions/020_rename_tables.py @@ -0,0 +1,30 @@ +import sqlalchemy + + +def upgrade(migrate_engine): + meta = sqlalchemy.MetaData() + meta.bind = migrate_engine + + replica = sqlalchemy.Table('replica', meta, autoload=True) + replica.rename('transfer') + + migration = sqlalchemy.Table('migration', meta, autoload=True) + migration.rename('deployment') + migration.c.replica_id.alter(name='transfer_id', nullable=False) + migration.c.replication_count.drop() + + replica_schedule = sqlalchemy.Table( + 'replica_schedules', meta, autoload=True) + replica_schedule.rename('transfer_schedules') + replica_schedule.c.replica_id.alter(name='transfer_id') + + # NOTE(dvincze): Update models polymorphic identity + # Due to the model code changes, this cannot be done using the ORM. + # Had to resort to using raw SQL statements. 
+ with migrate_engine.connect() as conn: + conn.execute( + 'UPDATE base_transfer_action SET type = "transfer" ' + 'WHERE type = "replica";') + conn.execute( + 'UPDATE base_transfer_action SET type = "deployment" ' + 'WHERE type = "migration";') diff --git a/coriolis/db/sqlalchemy/models.py b/coriolis/db/sqlalchemy/models.py index f326837ef..638d13d96 100644 --- a/coriolis/db/sqlalchemy/models.py +++ b/coriolis/db/sqlalchemy/models.py @@ -323,56 +323,61 @@ def to_dict(self, include_task_info=True, include_executions=True): return result -class Replica(BaseTransferAction): - __tablename__ = 'replica' +class Transfer(BaseTransferAction): + __tablename__ = 'transfer' id = sqlalchemy.Column( sqlalchemy.String(36), sqlalchemy.ForeignKey( 'base_transfer_action.base_id'), primary_key=True) + scenario = sqlalchemy.Column( + sqlalchemy.String(255), nullable=False, + default=constants.TRANSFER_SCENARIO_REPLICA) __mapper_args__ = { - 'polymorphic_identity': 'replica', + 'polymorphic_identity': 'transfer', } def to_dict(self, include_task_info=True, include_executions=True): - base = super(Replica, self).to_dict( + base = super(Transfer, self).to_dict( include_task_info=include_task_info, include_executions=include_executions) - base.update({"id": self.id}) + base.update({ + "id": self.id, + "scenario": self.scenario}) return base -class Migration(BaseTransferAction): - __tablename__ = 'migration' +class Deployment(BaseTransferAction): + __tablename__ = 'deployment' id = sqlalchemy.Column( sqlalchemy.String(36), sqlalchemy.ForeignKey( 'base_transfer_action.base_id'), primary_key=True) - replica_id = sqlalchemy.Column( + transfer_id = sqlalchemy.Column( sqlalchemy.String(36), - sqlalchemy.ForeignKey('replica.id'), nullable=True) - replica = orm.relationship( - Replica, backref=orm.backref("migrations"), foreign_keys=[replica_id]) + sqlalchemy.ForeignKey('transfer.id'), nullable=False) + transfer = orm.relationship( + Transfer, backref=orm.backref("deployments"), + 
foreign_keys=[transfer_id]) shutdown_instances = sqlalchemy.Column( sqlalchemy.Boolean, nullable=False, default=False) - replication_count = sqlalchemy.Column( - sqlalchemy.Integer, nullable=False, default=2) __mapper_args__ = { - 'polymorphic_identity': 'migration', + 'polymorphic_identity': 'deployment', } def to_dict(self, include_task_info=True, include_tasks=True): - base = super(Migration, self).to_dict( + base = super(Deployment, self).to_dict( include_task_info=include_task_info, include_executions=include_tasks) + base.update({ "id": self.id, - "replica_id": self.replica_id, + "transfer_id": self.transfer_id, + "transfer_scenario_type": self.transfer.scenario, "shutdown_instances": self.shutdown_instances, - "replication_count": self.replication_count, }) return base @@ -658,18 +663,18 @@ class Endpoint(BASE, models.TimestampMixin, models.ModelBase, secondary="endpoint_region_mapping") -class ReplicaSchedule(BASE, models.TimestampMixin, models.ModelBase, - models.SoftDeleteMixin): - __tablename__ = "replica_schedules" +class TransferSchedule(BASE, models.TimestampMixin, models.ModelBase, + models.SoftDeleteMixin): + __tablename__ = "transfer_schedules" id = sqlalchemy.Column(sqlalchemy.String(36), default=lambda: str(uuid.uuid4()), primary_key=True) - replica_id = sqlalchemy.Column( + transfer_id = sqlalchemy.Column( sqlalchemy.String(36), - sqlalchemy.ForeignKey('replica.id'), nullable=False) - replica = orm.relationship( - Replica, backref=orm.backref("schedules"), foreign_keys=[replica_id]) + sqlalchemy.ForeignKey('transfer.id'), nullable=False) + transfer = orm.relationship( + Transfer, backref=orm.backref("schedules"), foreign_keys=[transfer_id]) schedule = sqlalchemy.Column(types.Json, nullable=False) expiration_date = sqlalchemy.Column( sqlalchemy.types.DateTime, nullable=True) diff --git a/coriolis/migrations/__init__.py b/coriolis/deployments/__init__.py similarity index 100% rename from coriolis/migrations/__init__.py rename to 
coriolis/deployments/__init__.py diff --git a/coriolis/deployments/api.py b/coriolis/deployments/api.py new file mode 100644 index 000000000..8cc21eec6 --- /dev/null +++ b/coriolis/deployments/api.py @@ -0,0 +1,35 @@ +# Copyright 2024 Cloudbase Solutions Srl +# All Rights Reserved. + +from coriolis.conductor.rpc import client as rpc_client + + +class API(object): + def __init__(self): + self._rpc_client = rpc_client.ConductorClient() + + def deploy_transfer_instances(self, ctxt, transfer_id, + instance_osmorphing_minion_pool_mappings, + clone_disks=False, force=False, + skip_os_morphing=False, user_scripts=None): + return self._rpc_client.deploy_transfer_instances( + ctxt, transfer_id, instance_osmorphing_minion_pool_mappings=( + instance_osmorphing_minion_pool_mappings), + clone_disks=clone_disks, force=force, + skip_os_morphing=skip_os_morphing, + user_scripts=user_scripts) + + def delete(self, ctxt, deployment_id): + self._rpc_client.delete_deployment(ctxt, deployment_id) + + def cancel(self, ctxt, deployment_id, force): + self._rpc_client.cancel_deployment(ctxt, deployment_id, force) + + def get_deployments(self, ctxt, include_tasks=False, + include_task_info=False): + return self._rpc_client.get_deployments( + ctxt, include_tasks, include_task_info=include_task_info) + + def get_deployment(self, ctxt, deployment_id, include_task_info=False): + return self._rpc_client.get_deployment( + ctxt, deployment_id, include_task_info=include_task_info) diff --git a/coriolis/migrations/manager.py b/coriolis/deployments/manager.py similarity index 100% rename from coriolis/migrations/manager.py rename to coriolis/deployments/manager.py diff --git a/coriolis/diagnostics/api.py b/coriolis/diagnostics/api.py index 364259303..aaa865a85 100644 --- a/coriolis/diagnostics/api.py +++ b/coriolis/diagnostics/api.py @@ -2,7 +2,7 @@ # All Rights Reserved. 
from coriolis.conductor.rpc import client as conductor_rpc -from coriolis.replica_cron.rpc import client as cron_rpc +from coriolis.transfer_cron.rpc import client as cron_rpc from coriolis import utils from coriolis.worker.rpc import client as worker_rpc @@ -10,7 +10,7 @@ class API(object): def __init__(self): self._conductor_cli = conductor_rpc.ConductorClient() - self._cron_cli = cron_rpc.ReplicaCronClient() + self._cron_cli = cron_rpc.TransferCronClient() self._worker_cli = worker_rpc.WorkerClient() def get(self, ctxt): diff --git a/coriolis/exception.py b/coriolis/exception.py index a1c779f0c..603b0b9c7 100644 --- a/coriolis/exception.py +++ b/coriolis/exception.py @@ -137,6 +137,12 @@ class Conflict(CoriolisException): safe = True +class LicensingException(Conflict): + message = _("Licensing exception occurred") + code = 409 + safe = True + + class AdminRequired(NotAuthorized): message = _("User does not have admin privileges") @@ -229,12 +235,12 @@ class InvalidActionTasksExecutionState(Invalid): message = _("Invalid tasks execution state: %(reason)s") -class InvalidMigrationState(Invalid): - message = _("Invalid migration state: %(reason)s") +class InvalidDeploymentState(Invalid): + message = _("Invalid deployment state: %(reason)s") -class InvalidReplicaState(Invalid): - message = _("Invalid replica state: %(reason)s") +class InvalidTransferState(Invalid): + message = _("Invalid transfer state: %(reason)s") class InvalidInstanceState(Invalid): @@ -517,3 +523,11 @@ class OSMorphingWinRMOperationTimeout(OSMorphingOperationTimeout): " or the command execution time exceeds the timeout set. Try extending" " the timeout by editing the 'default_osmorphing_operation_timeout' " "in Coriolis' static configuration file.") + + +class MigrationLicenceFulfilledException(Invalid): + message = ( + "The Live Migration operation with ID '%(action_id)s' (licensing " + "reservation '%(reservation_id)s') has already been fulfilled on " + "%(fulfilled_at)s. 
Please create a new Live Migration operation to " + "create a new licensing reservation.") diff --git a/coriolis/licensing/client.py b/coriolis/licensing/client.py index da291d632..967ce8eb2 100644 --- a/coriolis/licensing/client.py +++ b/coriolis/licensing/client.py @@ -44,7 +44,7 @@ def from_env(cls): "instantiate licensing client.") return None allow_untrusted = os.environ.get( - "LICENSING_SERVER_ALLOW_UNTRUSTED", False) + "LICENSING_SERVER_ALLOW_UNTRUSTED", None) is not None client = cls( base_url, appliance_id=None, allow_untrusted=allow_untrusted) appliance_ids = client.get_appliances() @@ -214,6 +214,12 @@ def check_refresh_reservation(self, reservation_id): "/reservations/%s/refresh" % reservation_id, None, response_key="reservation") + def mark_reservation_fulfilled(self, reservation_id): + """ Marks the given reservation as fulfilled. """ + return self._post( + "/reservations/%s/fulfill" % reservation_id, None, + response_key="reservation") + def delete_reservation(self, reservation_id, raise_on_404=False): """ Deletes a reservation by its ID. Unless `raise_on_404` is set, ignores not found reservations. diff --git a/coriolis/migrations/api.py b/coriolis/migrations/api.py deleted file mode 100644 index 7918825fb..000000000 --- a/coriolis/migrations/api.py +++ /dev/null @@ -1,54 +0,0 @@ -# Copyright 2016 Cloudbase Solutions Srl -# All Rights Reserved. 
- -from coriolis.conductor.rpc import client as rpc_client - - -class API(object): - def __init__(self): - self._rpc_client = rpc_client.ConductorClient() - - def migrate_instances(self, ctxt, origin_endpoint_id, - destination_endpoint_id, origin_minion_pool_id, - destination_minion_pool_id, - instance_osmorphing_minion_pool_mappings, - source_environment, destination_environment, - instances, network_map, storage_mappings, - replication_count, - shutdown_instances, notes=None, - skip_os_morphing=False, user_scripts=None): - return self._rpc_client.migrate_instances( - ctxt, origin_endpoint_id, destination_endpoint_id, - origin_minion_pool_id, destination_minion_pool_id, - instance_osmorphing_minion_pool_mappings, source_environment, - destination_environment, instances, network_map, - storage_mappings, replication_count, - shutdown_instances=shutdown_instances, - notes=notes, skip_os_morphing=skip_os_morphing, - user_scripts=user_scripts) - - def deploy_replica_instances(self, ctxt, replica_id, - instance_osmorphing_minion_pool_mappings, - clone_disks=False, force=False, - skip_os_morphing=False, user_scripts=None): - return self._rpc_client.deploy_replica_instances( - ctxt, replica_id, instance_osmorphing_minion_pool_mappings=( - instance_osmorphing_minion_pool_mappings), - clone_disks=clone_disks, force=force, - skip_os_morphing=skip_os_morphing, - user_scripts=user_scripts) - - def delete(self, ctxt, migration_id): - self._rpc_client.delete_migration(ctxt, migration_id) - - def cancel(self, ctxt, migration_id, force): - self._rpc_client.cancel_migration(ctxt, migration_id, force) - - def get_migrations(self, ctxt, include_tasks=False, - include_task_info=False): - return self._rpc_client.get_migrations( - ctxt, include_tasks, include_task_info=include_task_info) - - def get_migration(self, ctxt, migration_id, include_task_info=False): - return self._rpc_client.get_migration( - ctxt, migration_id, include_task_info=include_task_info) diff --git 
a/coriolis/minion_manager/rpc/client.py b/coriolis/minion_manager/rpc/client.py index 4803047d8..5253dc458 100644 --- a/coriolis/minion_manager/rpc/client.py +++ b/coriolis/minion_manager/rpc/client.py @@ -67,17 +67,17 @@ def validate_minion_pool_selections_for_action(self, ctxt, action): ctxt, 'validate_minion_pool_selections_for_action', action=action) - def allocate_minion_machines_for_replica( - self, ctxt, replica): + def allocate_minion_machines_for_transfer( + self, ctxt, transfer): return self._cast( - ctxt, 'allocate_minion_machines_for_replica', replica=replica) + ctxt, 'allocate_minion_machines_for_transfer', transfer=transfer) - def allocate_minion_machines_for_migration( - self, ctxt, migration, include_transfer_minions=True, + def allocate_minion_machines_for_deployment( + self, ctxt, deployment, include_transfer_minions=True, include_osmorphing_minions=True): return self._cast( - ctxt, 'allocate_minion_machines_for_migration', - migration=migration, + ctxt, 'allocate_minion_machines_for_deployment', + deployment=deployment, include_transfer_minions=include_transfer_minions, include_osmorphing_minions=include_osmorphing_minions) diff --git a/coriolis/minion_manager/rpc/server.py b/coriolis/minion_manager/rpc/server.py index d639cd7f0..3d74552f0 100644 --- a/coriolis/minion_manager/rpc/server.py +++ b/coriolis/minion_manager/rpc/server.py @@ -67,7 +67,7 @@ def __init__(self): self._scheduler_client_instance = None self._worker_client_instance = None self._conductor_client_instance = None - self._replica_cron_client_instance = None + self._transfer_cron_client_instance = None self._minion_manager_client_instance = None try: self._cron = cron.Cron() @@ -510,52 +510,53 @@ def _check_pool_minion_count( "Successfully validated minion pool selections for action '%s' " "with properties: %s", action['id'], action) - def allocate_minion_machines_for_replica( - self, ctxt, replica): + def allocate_minion_machines_for_transfer( + self, ctxt, transfer): try: 
self._run_machine_allocation_subflow_for_action( - ctxt, replica, - constants.TRANSFER_ACTION_TYPE_REPLICA, + ctxt, transfer, + constants.TRANSFER_ACTION_TYPE_TRANSFER, include_transfer_minions=True, include_osmorphing_minions=False) except Exception as ex: LOG.warn( "Error occurred while allocating minion machines for " - "Replica with ID '%s'. Removing all allocations. " + "Transfer with ID '%s'. Removing all allocations. " "Error was: %s" % ( - replica['id'], utils.get_exception_details())) + transfer['id'], utils.get_exception_details())) self._cleanup_machines_with_statuses_for_action( - ctxt, replica['id'], + ctxt, transfer['id'], [constants.MINION_MACHINE_STATUS_UNINITIALIZED]) self.deallocate_minion_machines_for_action( - ctxt, replica['id']) - self._rpc_conductor_client.report_replica_minions_allocation_error( - ctxt, replica['id'], str(ex)) + ctxt, transfer['id']) + (self._rpc_conductor_client + .report_transfer_minions_allocation_error( + ctxt, transfer['id'], str(ex))) raise - def allocate_minion_machines_for_migration( - self, ctxt, migration, include_transfer_minions=True, + def allocate_minion_machines_for_deployment( + self, ctxt, deployment, include_transfer_minions=True, include_osmorphing_minions=True): try: self._run_machine_allocation_subflow_for_action( - ctxt, migration, - constants.TRANSFER_ACTION_TYPE_MIGRATION, + ctxt, deployment, + constants.TRANSFER_ACTION_TYPE_DEPLOYMENT, include_transfer_minions=include_transfer_minions, include_osmorphing_minions=include_osmorphing_minions) except Exception as ex: LOG.warn( "Error occurred while allocating minion machines for " - "Migration with ID '%s'. Removing all allocations. " + "Deployment with ID '%s'. Removing all allocations. 
" "Error was: %s" % ( - migration['id'], utils.get_exception_details())) + deployment['id'], utils.get_exception_details())) self._cleanup_machines_with_statuses_for_action( - ctxt, migration['id'], + ctxt, deployment['id'], [constants.MINION_MACHINE_STATUS_UNINITIALIZED]) self.deallocate_minion_machines_for_action( - ctxt, migration['id']) + ctxt, deployment['id']) (self._rpc_conductor_client - .report_migration_minions_allocation_error( - ctxt, migration['id'], str(ex))) + .report_deployment_minions_allocation_error( + ctxt, deployment['id'], str(ex))) raise def _make_minion_machine_allocation_subflow_for_action( @@ -779,7 +780,7 @@ def _run_machine_allocation_subflow_for_action( machine_action_allocation_subflow_name_format = None allocation_failure_reporting_task_class = None allocation_confirmation_reporting_task_class = None - if action_type == constants.TRANSFER_ACTION_TYPE_MIGRATION: + if action_type == constants.TRANSFER_ACTION_TYPE_DEPLOYMENT: allocation_flow_name_format = ( (minion_mgr_tasks. MINION_POOL_MIGRATION_ALLOCATION_FLOW_NAME_FORMAT)) @@ -793,7 +794,7 @@ def _run_machine_allocation_subflow_for_action( machine_action_allocation_subflow_name_format = ( (minion_mgr_tasks. MINION_POOL_ALLOCATE_MACHINES_FOR_MIGRATION_SUBFLOW_NAME_FORMAT)) # noqa: E501 - elif action_type == constants.TRANSFER_ACTION_TYPE_REPLICA: + elif action_type == constants.TRANSFER_ACTION_TYPE_TRANSFER: allocation_flow_name_format = ( (minion_mgr_tasks. 
MINION_POOL_REPLICA_ALLOCATION_FLOW_NAME_FORMAT)) diff --git a/coriolis/minion_manager/rpc/tasks.py b/coriolis/minion_manager/rpc/tasks.py index a3daef2ab..5e8c6acf4 100644 --- a/coriolis/minion_manager/rpc/tasks.py +++ b/coriolis/minion_manager/rpc/tasks.py @@ -191,7 +191,7 @@ def _get_task_name(self, action_id): def _report_machine_allocation_failure( self, context, action_id, failure_str): - self._conductor_client.report_migration_minions_allocation_error( + self._conductor_client.report_deployment_minions_allocation_error( context, action_id, failure_str) @@ -205,7 +205,7 @@ def _get_task_name(self, action_id): def _report_machine_allocation_failure( self, context, action_id, failure_str): - self._conductor_client.report_replica_minions_allocation_error( + self._conductor_client.report_transfer_minions_allocation_error( context, action_id, failure_str) @@ -379,8 +379,8 @@ def _check_minion_properties( raise exception.MinionMachineAllocationFailure( msg) from ex except ( - exception.InvalidMigrationState, - exception.InvalidReplicaState) as ex: + exception.InvalidDeploymentState, + exception.InvalidTransferState) as ex: msg = ( "The Conductor has refused minion machine allocations for " "%s with ID '%s' as it is purportedly in an invalid state " @@ -410,7 +410,7 @@ def _get_task_name(self, action_id): def _confirm_machine_allocation_for_action( self, context, action_id, machine_allocations): - self._conductor_client.confirm_migration_minions_allocation( + self._conductor_client.confirm_deployment_minions_allocation( context, action_id, machine_allocations) @@ -427,7 +427,7 @@ def _get_task_name(self, action_id): def _confirm_machine_allocation_for_action( self, context, action_id, machine_allocations): - self._conductor_client.confirm_replica_minions_allocation( + self._conductor_client.confirm_transfer_minions_allocation( context, action_id, machine_allocations) diff --git a/coriolis/policies/deployments.py b/coriolis/policies/deployments.py new file mode 
100644 index 000000000..a24c9d411 --- /dev/null +++ b/coriolis/policies/deployments.py @@ -0,0 +1,80 @@ +# Copyright 2018 Cloudbase Solutions Srl +# All Rights Reserved. + +from oslo_policy import policy + +from coriolis.policies import base + + +DEPLOYMENTS_POLICY_PREFIX = "%s:deployments" % base.CORIOLIS_POLICIES_PREFIX +DEPLOYMENTS_POLICY_DEFAULT_RULE = "rule:admin_or_owner" + + +def get_deployments_policy_label(rule_label): + return "%s:%s" % ( + DEPLOYMENTS_POLICY_PREFIX, rule_label) + + +DEPLOYMENTS_POLICY_DEFAULT_RULES = [ + policy.DocumentedRuleDefault( + get_deployments_policy_label('create'), + DEPLOYMENTS_POLICY_DEFAULT_RULE, + "Create a deployment", + [ + { + "path": "/deployments", + "method": "POST" + } + ] + ), + policy.DocumentedRuleDefault( + get_deployments_policy_label('list'), + DEPLOYMENTS_POLICY_DEFAULT_RULE, + "List deployments", + [ + { + "path": "/deployments", + "method": "GET" + } + ] + ), + policy.DocumentedRuleDefault( + get_deployments_policy_label('show'), + DEPLOYMENTS_POLICY_DEFAULT_RULE, + "Show details for a deployment", + [ + { + "path": "/deployments/{deployment_id}", + "method": "GET" + } + ] + ), + # TODO(aznashwan): deployment actions should ideally be + # declared in a separate module + policy.DocumentedRuleDefault( + get_deployments_policy_label('cancel'), + DEPLOYMENTS_POLICY_DEFAULT_RULE, + "Cancel a running Deployment", + [ + { + "path": "/deployments/{deployment_id}/actions", + "method": "POST" + } + ] + ), + policy.DocumentedRuleDefault( + get_deployments_policy_label('delete'), + DEPLOYMENTS_POLICY_DEFAULT_RULE, + "Delete Deployment", + [ + { + "path": "/deployments/{deployment_id}", + "method": "DELETE" + } + ] + ) +] + + +def list_rules(): + return DEPLOYMENTS_POLICY_DEFAULT_RULES diff --git a/coriolis/policies/migrations.py b/coriolis/policies/migrations.py deleted file mode 100644 index f1570b0ba..000000000 --- a/coriolis/policies/migrations.py +++ /dev/null @@ -1,80 +0,0 @@ -# Copyright 2018 Cloudbase Solutions
Srl -# All Rights Reserved. - -from oslo_policy import policy - -from coriolis.policies import base - - -MIGRATIONS_POLICY_PREFIX = "%s:migrations" % base.CORIOLIS_POLICIES_PREFIX -MIGRATIONS_POLICY_DEFAULT_RULE = "rule:admin_or_owner" - - -def get_migrations_policy_label(rule_label): - return "%s:%s" % ( - MIGRATIONS_POLICY_PREFIX, rule_label) - - -MIGRATIONS_POLICY_DEFAULT_RULES = [ - policy.DocumentedRuleDefault( - get_migrations_policy_label('create'), - MIGRATIONS_POLICY_DEFAULT_RULE, - "Create a migration", - [ - { - "path": "/migrations", - "method": "POST" - } - ] - ), - policy.DocumentedRuleDefault( - get_migrations_policy_label('list'), - MIGRATIONS_POLICY_DEFAULT_RULE, - "List migrations", - [ - { - "path": "/migrations", - "method": "GET" - } - ] - ), - policy.DocumentedRuleDefault( - get_migrations_policy_label('show'), - MIGRATIONS_POLICY_DEFAULT_RULE, - "Show details for a migration", - [ - { - "path": "/migrations/{migration_id}", - "method": "GET" - } - ] - ), - # TODO(aznashwan): migration actions should ideally be - # declared in a separate module - policy.DocumentedRuleDefault( - get_migrations_policy_label('cancel'), - MIGRATIONS_POLICY_DEFAULT_RULE, - "Cancel a running Migration", - [ - { - "path": "/migrations/{migration_id}/actions", - "method": "POST" - } - ] - ), - policy.DocumentedRuleDefault( - get_migrations_policy_label('delete'), - MIGRATIONS_POLICY_DEFAULT_RULE, - "Delete Migration", - [ - { - "path": "/migrations/{migration_id}", - "method": "DELETE" - } - ] - ) -] - - -def list_rules(): - return MIGRATIONS_POLICY_DEFAULT_RULES diff --git a/coriolis/policies/replica_schedules.py b/coriolis/policies/replica_schedules.py deleted file mode 100644 index 430883830..000000000 --- a/coriolis/policies/replica_schedules.py +++ /dev/null @@ -1,80 +0,0 @@ -# Copyright 2018 Cloudbase Solutions Srl -# All Rights Reserved. 
- -from oslo_policy import policy - -from coriolis.policies import base - - -REPLICA_SCHEDULES_POLICY_PREFIX = "%s:replica_schedules" % ( - base.CORIOLIS_POLICIES_PREFIX) -REPLICA_SCHEDULES_POLICY_DEFAULT_RULE = "rule:admin_or_owner" - - -def get_replica_schedules_policy_label(rule_label): - return "%s:%s" % ( - REPLICA_SCHEDULES_POLICY_PREFIX, rule_label) - - -REPLICA_SCHEDULES_POLICY_DEFAULT_RULES = [ - policy.DocumentedRuleDefault( - get_replica_schedules_policy_label('create'), - REPLICA_SCHEDULES_POLICY_DEFAULT_RULE, - "Create a new execution schedule for a given Replica", - [ - { - "path": "/replicas/{replica_id}/schedules", - "method": "POST" - } - ] - ), - policy.DocumentedRuleDefault( - get_replica_schedules_policy_label('list'), - REPLICA_SCHEDULES_POLICY_DEFAULT_RULE, - "List execution schedules for a given Replica", - [ - { - "path": "/replicas/{replica_id}/schedules", - "method": "GET" - } - ] - ), - policy.DocumentedRuleDefault( - get_replica_schedules_policy_label('show'), - REPLICA_SCHEDULES_POLICY_DEFAULT_RULE, - "Show details for an execution schedule for a given Replica", - [ - { - "path": "/replicas/{replica_id}/schedules/{schedule_id}", - "method": "GET" - } - ] - ), - policy.DocumentedRuleDefault( - get_replica_schedules_policy_label('update'), - REPLICA_SCHEDULES_POLICY_DEFAULT_RULE, - "Update an existing execution schedule for a given Replica", - [ - { - "path": ( - "/replicas/{replica_id}/schedules/{schedule_id}"), - "method": "PUT" - } - ] - ), - policy.DocumentedRuleDefault( - get_replica_schedules_policy_label('delete'), - REPLICA_SCHEDULES_POLICY_DEFAULT_RULE, - "Delete an execution schedule for a given Replica", - [ - { - "path": "/replicas/{replica_id}/schedules/{schedule_id}", - "method": "DELETE" - } - ] - ) -] - - -def list_rules(): - return REPLICA_SCHEDULES_POLICY_DEFAULT_RULES diff --git a/coriolis/policies/replica_tasks_executions.py b/coriolis/policies/replica_tasks_executions.py deleted file mode 100644 index 
f30a299ba..000000000 --- a/coriolis/policies/replica_tasks_executions.py +++ /dev/null @@ -1,83 +0,0 @@ -# Copyright 2018 Cloudbase Solutions Srl -# All Rights Reserved. - -from oslo_policy import policy - -from coriolis.policies import base - - -REPLICA_EXECUTIONS_POLICY_PREFIX = "%s:replica_executions" % ( - base.CORIOLIS_POLICIES_PREFIX) -REPLICA_EXECUTIONS_POLICY_DEFAULT_RULE = "rule:admin_or_owner" - - -def get_replica_executions_policy_label(rule_label): - return "%s:%s" % ( - REPLICA_EXECUTIONS_POLICY_PREFIX, rule_label) - - -REPLICA_EXECUTIONS_POLICY_DEFAULT_RULES = [ - policy.DocumentedRuleDefault( - get_replica_executions_policy_label('create'), - REPLICA_EXECUTIONS_POLICY_DEFAULT_RULE, - "Create a new execution for a given Replica", - [ - { - "path": "/replicas/{replica_id}/executions", - "method": "POST" - } - ] - ), - policy.DocumentedRuleDefault( - get_replica_executions_policy_label('list'), - REPLICA_EXECUTIONS_POLICY_DEFAULT_RULE, - "List Executions for a given Replica", - [ - { - "path": "/replicas/{replica_id}/executions", - "method": "GET" - } - ] - ), - policy.DocumentedRuleDefault( - get_replica_executions_policy_label('show'), - REPLICA_EXECUTIONS_POLICY_DEFAULT_RULE, - "Show details for Replica execution", - [ - { - "path": "/replicas/{replica_id}/executions/{execution_id}", - "method": "GET" - } - ] - ), - # TODO(aznashwan): replica actions should ideally be - # declared in a separate module - policy.DocumentedRuleDefault( - get_replica_executions_policy_label('cancel'), - REPLICA_EXECUTIONS_POLICY_DEFAULT_RULE, - "Cancel a Replica execution", - [ - { - "path": ( - "/replicas/{replica_id}/executions/" - "{execution_id}/actions"), - "method": "POST" - } - ] - ), - policy.DocumentedRuleDefault( - get_replica_executions_policy_label('delete'), - REPLICA_EXECUTIONS_POLICY_DEFAULT_RULE, - "Delete an execution for a given Replica", - [ - { - "path": "/replicas/{replica_id}/executions/{execution_id}", - "method": "DELETE" - } - ] - ) -] - - -def 
list_rules(): - return REPLICA_EXECUTIONS_POLICY_DEFAULT_RULES diff --git a/coriolis/policies/replicas.py b/coriolis/policies/replicas.py deleted file mode 100644 index a48f517dc..000000000 --- a/coriolis/policies/replicas.py +++ /dev/null @@ -1,92 +0,0 @@ -# Copyright 2018 Cloudbase Solutions Srl -# All Rights Reserved. - -from oslo_policy import policy - -from coriolis.policies import base - - -REPLICAS_POLICY_PREFIX = "%s:replicas" % base.CORIOLIS_POLICIES_PREFIX -REPLICAS_POLICY_DEFAULT_RULE = "rule:admin_or_owner" - - -def get_replicas_policy_label(rule_label): - return "%s:%s" % ( - REPLICAS_POLICY_PREFIX, rule_label) - - -REPLICAS_POLICY_DEFAULT_RULES = [ - policy.DocumentedRuleDefault( - get_replicas_policy_label('create'), - REPLICAS_POLICY_DEFAULT_RULE, - "Create a Replica", - [ - { - "path": "/replicas", - "method": "POST" - } - ] - ), - policy.DocumentedRuleDefault( - get_replicas_policy_label('list'), - REPLICAS_POLICY_DEFAULT_RULE, - "List Replicas", - [ - { - "path": "/replicas", - "method": "GET" - } - ] - ), - policy.DocumentedRuleDefault( - get_replicas_policy_label('show'), - REPLICAS_POLICY_DEFAULT_RULE, - "Show details for Replica", - [ - { - "path": "/replicas/{replica_id}", - "method": "GET" - } - ] - ), - # TODO(aznashwan): replica actions should ideally be - # declared in a separate module - policy.DocumentedRuleDefault( - get_replicas_policy_label('delete_disks'), - REPLICAS_POLICY_DEFAULT_RULE, - "Delete Replica Disks", - [ - { - "path": "/replicas/{replica_id}/actions", - "method": "POST" - } - ] - ), - policy.DocumentedRuleDefault( - get_replicas_policy_label('delete'), - REPLICAS_POLICY_DEFAULT_RULE, - "Delete Replica", - [ - { - "path": "/replicas/{replica_id}", - "method": "DELETE" - } - ] - ), - policy.DocumentedRuleDefault( - get_replicas_policy_label('update'), - REPLICAS_POLICY_DEFAULT_RULE, - "Update Replica", - [ - { - "path": "/replicas/{replica_id}", - "method": "POST" - } - ] - ) - -] - - -def list_rules(): - return 
REPLICAS_POLICY_DEFAULT_RULES diff --git a/coriolis/policies/transfer_schedules.py b/coriolis/policies/transfer_schedules.py new file mode 100644 index 000000000..409518727 --- /dev/null +++ b/coriolis/policies/transfer_schedules.py @@ -0,0 +1,80 @@ +# Copyright 2018 Cloudbase Solutions Srl +# All Rights Reserved. + +from oslo_policy import policy + +from coriolis.policies import base + + +TRANSFER_SCHEDULES_POLICY_PREFIX = "%s:transfer_schedules" % ( + base.CORIOLIS_POLICIES_PREFIX) +TRANSFER_SCHEDULES_POLICY_DEFAULT_RULE = "rule:admin_or_owner" + + +def get_transfer_schedules_policy_label(rule_label): + return "%s:%s" % ( + TRANSFER_SCHEDULES_POLICY_PREFIX, rule_label) + + +TRANSFER_SCHEDULES_POLICY_DEFAULT_RULES = [ + policy.DocumentedRuleDefault( + get_transfer_schedules_policy_label('create'), + TRANSFER_SCHEDULES_POLICY_DEFAULT_RULE, + "Create a new execution schedule for a given Transfer", + [ + { + "path": "/transfers/{transfer_id}/schedules", + "method": "POST" + } + ] + ), + policy.DocumentedRuleDefault( + get_transfer_schedules_policy_label('list'), + TRANSFER_SCHEDULES_POLICY_DEFAULT_RULE, + "List execution schedules for a given Transfer", + [ + { + "path": "/transfers/{transfer_id}/schedules", + "method": "GET" + } + ] + ), + policy.DocumentedRuleDefault( + get_transfer_schedules_policy_label('show'), + TRANSFER_SCHEDULES_POLICY_DEFAULT_RULE, + "Show details for an execution schedule for a given Transfer", + [ + { + "path": "/transfers/{transfer_id}/schedules/{schedule_id}", + "method": "GET" + } + ] + ), + policy.DocumentedRuleDefault( + get_transfer_schedules_policy_label('update'), + TRANSFER_SCHEDULES_POLICY_DEFAULT_RULE, + "Update an existing execution schedule for a given Transfer", + [ + { + "path": ( + "/transfers/{transfer_id}/schedules/{schedule_id}"), + "method": "PUT" + } + ] + ), + policy.DocumentedRuleDefault( + get_transfer_schedules_policy_label('delete'), + TRANSFER_SCHEDULES_POLICY_DEFAULT_RULE, + "Delete an execution schedule for a 
given Transfer", + [ + { + "path": "/transfers/{transfer_id}/schedules/{schedule_id}", + "method": "DELETE" + } + ] + ) +] + + +def list_rules(): + return TRANSFER_SCHEDULES_POLICY_DEFAULT_RULES diff --git a/coriolis/policies/transfer_tasks_executions.py b/coriolis/policies/transfer_tasks_executions.py new file mode 100644 index 000000000..b653149b3 --- /dev/null +++ b/coriolis/policies/transfer_tasks_executions.py @@ -0,0 +1,83 @@ +# Copyright 2018 Cloudbase Solutions Srl +# All Rights Reserved. + +from oslo_policy import policy + +from coriolis.policies import base + + +TRANSFER_EXECUTIONS_POLICY_PREFIX = "%s:transfer_executions" % ( + base.CORIOLIS_POLICIES_PREFIX) +TRANSFER_EXECUTIONS_POLICY_DEFAULT_RULE = "rule:admin_or_owner" + + +def get_transfer_executions_policy_label(rule_label): + return "%s:%s" % ( + TRANSFER_EXECUTIONS_POLICY_PREFIX, rule_label) + + +TRANSFER_EXECUTIONS_POLICY_DEFAULT_RULES = [ + policy.DocumentedRuleDefault( + get_transfer_executions_policy_label('create'), + TRANSFER_EXECUTIONS_POLICY_DEFAULT_RULE, + "Create a new execution for a given Transfer", + [ + { + "path": "/transfers/{transfer_id}/executions", + "method": "POST" + } + ] + ), + policy.DocumentedRuleDefault( + get_transfer_executions_policy_label('list'), + TRANSFER_EXECUTIONS_POLICY_DEFAULT_RULE, + "List Executions for a given Transfer", + [ + { + "path": "/transfers/{transfer_id}/executions", + "method": "GET" + } + ] + ), + policy.DocumentedRuleDefault( + get_transfer_executions_policy_label('show'), + TRANSFER_EXECUTIONS_POLICY_DEFAULT_RULE, + "Show details for Transfer execution", + [ + { + "path": "/transfers/{transfer_id}/executions/{execution_id}", + "method": "GET" + } + ] + ), + # TODO(aznashwan): transfer actions should ideally be + # declared in a separate module + policy.DocumentedRuleDefault( + get_transfer_executions_policy_label('cancel'), + TRANSFER_EXECUTIONS_POLICY_DEFAULT_RULE, + "Cancel a Transfer execution", + [ + { + "path": ( + 
"/transfers/{transfer_id}/executions/" + "{execution_id}/actions"), + "method": "POST" + } + ] + ), + policy.DocumentedRuleDefault( + get_transfer_executions_policy_label('delete'), + TRANSFER_EXECUTIONS_POLICY_DEFAULT_RULE, + "Delete an execution for a given Transfer", + [ + { + "path": "/transfers/{transfer_id}/executions/{execution_id}", + "method": "DELETE" + } + ] + ) +] + + +def list_rules(): + return TRANSFER_EXECUTIONS_POLICY_DEFAULT_RULES diff --git a/coriolis/policies/transfers.py b/coriolis/policies/transfers.py new file mode 100644 index 000000000..85978d5af --- /dev/null +++ b/coriolis/policies/transfers.py @@ -0,0 +1,92 @@ +# Copyright 2018 Cloudbase Solutions Srl +# All Rights Reserved. + +from oslo_policy import policy + +from coriolis.policies import base + + +TRANSFERS_POLICY_PREFIX = "%s:transfers" % base.CORIOLIS_POLICIES_PREFIX +TRANSFERS_POLICY_DEFAULT_RULE = "rule:admin_or_owner" + + +def get_transfers_policy_label(rule_label): + return "%s:%s" % ( + TRANSFERS_POLICY_PREFIX, rule_label) + + +TRANSFERS_POLICY_DEFAULT_RULES = [ + policy.DocumentedRuleDefault( + get_transfers_policy_label('create'), + TRANSFERS_POLICY_DEFAULT_RULE, + "Create a Transfer", + [ + { + "path": "/transfers", + "method": "POST" + } + ] + ), + policy.DocumentedRuleDefault( + get_transfers_policy_label('list'), + TRANSFERS_POLICY_DEFAULT_RULE, + "List Transfers", + [ + { + "path": "/transfers", + "method": "GET" + } + ] + ), + policy.DocumentedRuleDefault( + get_transfers_policy_label('show'), + TRANSFERS_POLICY_DEFAULT_RULE, + "Show details for Transfer", + [ + { + "path": "/transfers/{transfer_id}", + "method": "GET" + } + ] + ), + # TODO(aznashwan): transfer actions should ideally be + # declared in a separate module + policy.DocumentedRuleDefault( + get_transfers_policy_label('delete_disks'), + TRANSFERS_POLICY_DEFAULT_RULE, + "Delete Transfer Disks", + [ + { + "path": "/transfers/{transfer_id}/actions", + "method": "POST" + } + ] + ), + policy.DocumentedRuleDefault( 
+ get_transfers_policy_label('delete'), + TRANSFERS_POLICY_DEFAULT_RULE, + "Delete Transfer", + [ + { + "path": "/transfers/{transfer_id}", + "method": "DELETE" + } + ] + ), + policy.DocumentedRuleDefault( + get_transfers_policy_label('update'), + TRANSFERS_POLICY_DEFAULT_RULE, + "Update Transfer", + [ + { + "path": "/transfers/{transfer_id}", + "method": "POST" + } + ] + ) + +] + + +def list_rules(): + return TRANSFERS_POLICY_DEFAULT_RULES diff --git a/coriolis/policy.py b/coriolis/policy.py index fb5694f33..297a4c92e 100644 --- a/coriolis/policy.py +++ b/coriolis/policy.py @@ -9,16 +9,16 @@ from coriolis import exception from coriolis.policies import base +from coriolis.policies import deployments from coriolis.policies import diagnostics from coriolis.policies import endpoints from coriolis.policies import general -from coriolis.policies import migrations from coriolis.policies import minion_pools from coriolis.policies import regions -from coriolis.policies import replica_schedules -from coriolis.policies import replica_tasks_executions -from coriolis.policies import replicas from coriolis.policies import services +from coriolis.policies import transfer_schedules +from coriolis.policies import transfer_tasks_executions +from coriolis.policies import transfers from coriolis import utils @@ -28,8 +28,9 @@ _ENFORCER = None DEFAULT_POLICIES_MODULES = [ - base, endpoints, general, migrations, replicas, replica_schedules, - replica_tasks_executions, diagnostics, regions, services, minion_pools] + base, deployments, endpoints, general, transfers, + transfer_schedules, transfer_tasks_executions, diagnostics, regions, + services, minion_pools] def reset(): diff --git a/coriolis/providers/backup_writers.py b/coriolis/providers/backup_writers.py index fede3a359..d746a160f 100644 --- a/coriolis/providers/backup_writers.py +++ b/coriolis/providers/backup_writers.py @@ -254,7 +254,7 @@ def from_connection_info(cls, info, volumes_info): class 
SSHBackupWriterImpl(BaseBackupWriterImpl): - def __init__(self, path, disk_id, compress_transfer=None, + def __init__(self, path, disk_id, compress_transfer=False, encoder_count=3): self._msg_id = None self._stdin = None diff --git a/coriolis/providers/factory.py b/coriolis/providers/factory.py index 7a11652ff..ff3a5cdb2 100644 --- a/coriolis/providers/factory.py +++ b/coriolis/providers/factory.py @@ -22,8 +22,8 @@ # classical disk-export-based migrations to Replica-based ones: # constants.PROVIDER_TYPE_EXPORT: base.BaseExportProvider, # constants.PROVIDER_TYPE_IMPORT: base.BaseImportProvider, - constants.PROVIDER_TYPE_REPLICA_EXPORT: base.BaseReplicaExportProvider, - constants.PROVIDER_TYPE_REPLICA_IMPORT: base.BaseReplicaImportProvider, + constants.PROVIDER_TYPE_TRANSFER_EXPORT: base.BaseReplicaExportProvider, + constants.PROVIDER_TYPE_TRANSFER_IMPORT: base.BaseReplicaImportProvider, constants.PROVIDER_TYPE_ENDPOINT: base.BaseEndpointProvider, constants.PROVIDER_TYPE_DESTINATION_ENDPOINT_OPTIONS: base.BaseEndpointDestinationOptionsProvider, @@ -38,15 +38,15 @@ constants.PROVIDER_TYPE_SETUP_LIBS: base.BaseProviderSetupExtraLibsMixin, constants.PROVIDER_TYPE_VALIDATE_MIGRATION_EXPORT: ( base.BaseMigrationExportValidationProvider), - constants.PROVIDER_TYPE_VALIDATE_REPLICA_EXPORT: ( + constants.PROVIDER_TYPE_VALIDATE_TRANSFER_EXPORT: ( base.BaseReplicaExportValidationProvider), constants.PROVIDER_TYPE_VALIDATE_MIGRATION_IMPORT: ( base.BaseMigrationImportValidationProvider), - constants.PROVIDER_TYPE_VALIDATE_REPLICA_IMPORT: ( + constants.PROVIDER_TYPE_VALIDATE_TRANSFER_IMPORT: ( base.BaseReplicaImportValidationProvider), - constants.PROVIDER_TYPE_SOURCE_REPLICA_UPDATE: ( + constants.PROVIDER_TYPE_SOURCE_TRANSFER_UPDATE: ( base.BaseUpdateSourceReplicaProvider), - constants.PROVIDER_TYPE_DESTINATION_REPLICA_UPDATE: ( + constants.PROVIDER_TYPE_DESTINATION_TRANSFER_UPDATE: ( base.BaseUpdateDestinationReplicaProvider), constants.PROVIDER_TYPE_SOURCE_ENDPOINT_OPTIONS: ( 
base.BaseEndpointSourceOptionsProvider), @@ -81,7 +81,7 @@ def get_provider( parent = PROVIDER_TYPE_MAP.get(provider_type) if not parent: continue - if (cls.platform == platform_name and issubclass(cls, parent)): + if cls.platform == platform_name and issubclass(cls, parent): return cls(event_handler) if raise_if_not_found: diff --git a/coriolis/replica_cron/api.py b/coriolis/replica_cron/api.py deleted file mode 100644 index 426814738..000000000 --- a/coriolis/replica_cron/api.py +++ /dev/null @@ -1,31 +0,0 @@ -# Copyright 2017 Cloudbase Solutions Srl -# All Rights Reserved. - -from coriolis.conductor.rpc import client as rpc_client - - -class API(object): - def __init__(self): - self._rpc_client = rpc_client.ConductorClient() - - def create(self, ctxt, replica_id, schedule, enabled, - exp_date, shutdown_instance): - return self._rpc_client.create_replica_schedule( - ctxt, replica_id, schedule, enabled, exp_date, - shutdown_instance) - - def get_schedules(self, ctxt, replica_id, expired=True): - return self._rpc_client.get_replica_schedules( - ctxt, replica_id, expired=expired) - - def get_schedule(self, ctxt, replica_id, schedule_id, expired=True): - return self._rpc_client.get_replica_schedule( - ctxt, replica_id, schedule_id, expired=expired) - - def update(self, ctxt, replica_id, schedule_id, update_values): - return self._rpc_client.update_replica_schedule( - ctxt, replica_id, schedule_id, update_values) - - def delete(self, ctxt, replica_id, schedule_id): - self._rpc_client.delete_replica_schedule( - ctxt, replica_id, schedule_id) diff --git a/coriolis/replica_tasks_executions/api.py b/coriolis/replica_tasks_executions/api.py deleted file mode 100644 index f78b5e9eb..000000000 --- a/coriolis/replica_tasks_executions/api.py +++ /dev/null @@ -1,29 +0,0 @@ -# Copyright 2016 Cloudbase Solutions Srl -# All Rights Reserved. 
- -from coriolis.conductor.rpc import client as rpc_client - - -class API(object): - def __init__(self): - self._rpc_client = rpc_client.ConductorClient() - - def create(self, ctxt, replica_id, shutdown_instances): - return self._rpc_client.execute_replica_tasks( - ctxt, replica_id, shutdown_instances) - - def delete(self, ctxt, replica_id, execution_id): - self._rpc_client.delete_replica_tasks_execution( - ctxt, replica_id, execution_id) - - def cancel(self, ctxt, replica_id, execution_id, force): - self._rpc_client.cancel_replica_tasks_execution( - ctxt, replica_id, execution_id, force) - - def get_executions(self, ctxt, replica_id, include_tasks=False): - return self._rpc_client.get_replica_tasks_executions( - ctxt, replica_id, include_tasks) - - def get_execution(self, ctxt, replica_id, execution_id): - return self._rpc_client.get_replica_tasks_execution( - ctxt, replica_id, execution_id) diff --git a/coriolis/replicas/api.py b/coriolis/replicas/api.py deleted file mode 100644 index 66890463c..000000000 --- a/coriolis/replicas/api.py +++ /dev/null @@ -1,41 +0,0 @@ -# Copyright 2016 Cloudbase Solutions Srl -# All Rights Reserved. 
- -from coriolis.conductor.rpc import client as rpc_client - - -class API(object): - def __init__(self): - self._rpc_client = rpc_client.ConductorClient() - - def create(self, ctxt, origin_endpoint_id, destination_endpoint_id, - origin_minion_pool_id, destination_minion_pool_id, - instance_osmorphing_minion_pool_mappings, - source_environment, destination_environment, instances, - network_map, storage_mappings, notes=None, user_scripts=None): - return self._rpc_client.create_instances_replica( - ctxt, origin_endpoint_id, destination_endpoint_id, - origin_minion_pool_id, destination_minion_pool_id, - instance_osmorphing_minion_pool_mappings, - source_environment, destination_environment, instances, - network_map, storage_mappings, notes, user_scripts) - - def update(self, ctxt, replica_id, updated_properties): - return self._rpc_client.update_replica( - ctxt, replica_id, updated_properties) - - def delete(self, ctxt, replica_id): - self._rpc_client.delete_replica(ctxt, replica_id) - - def get_replicas(self, ctxt, include_tasks_executions=False, - include_task_info=False): - return self._rpc_client.get_replicas( - ctxt, include_tasks_executions, - include_task_info=include_task_info) - - def get_replica(self, ctxt, replica_id, include_task_info=False): - return self._rpc_client.get_replica( - ctxt, replica_id, include_task_info=include_task_info) - - def delete_disks(self, ctxt, replica_id): - return self._rpc_client.delete_replica_disks(ctxt, replica_id) diff --git a/coriolis/scheduler/scheduler_utils.py b/coriolis/scheduler/scheduler_utils.py index 00b30a79b..defd6dca9 100644 --- a/coriolis/scheduler/scheduler_utils.py +++ b/coriolis/scheduler/scheduler_utils.py @@ -8,8 +8,8 @@ from coriolis import constants from coriolis.db import api as db_api from coriolis import exception -from coriolis.replica_cron.rpc import client as rpc_cron_client from coriolis.scheduler.rpc import client as rpc_scheduler_client +from coriolis.transfer_cron.rpc import client as 
rpc_cron_client from coriolis.worker.rpc import client as rpc_worker_client @@ -21,8 +21,8 @@ constants.WORKER_MAIN_MESSAGING_TOPIC: rpc_worker_client.WorkerClient, constants.SCHEDULER_MAIN_MESSAGING_TOPIC: ( rpc_scheduler_client.SchedulerClient), - constants.REPLICA_CRON_MAIN_MESSAGING_TOPIC: ( - rpc_cron_client.ReplicaCronClient) + constants.TRANSFER_CRON_MAIN_MESSAGING_TOPIC: ( + rpc_cron_client.TransferCronClient) } diff --git a/coriolis/schemas.py b/coriolis/schemas.py index 278a25dd4..3a84be732 100644 --- a/coriolis/schemas.py +++ b/coriolis/schemas.py @@ -31,7 +31,7 @@ _CORIOLIS_VM_INSTANCE_INFO_SCHEMA_NAME = "vm_instance_info_schema.json" _CORIOLIS_OS_MORPHING_RES_SCHEMA_NAME = "os_morphing_resources_schema.json" _CORIOLIS_VM_NETWORK_SCHEMA_NAME = "vm_network_schema.json" -_SCHEDULE_API_BODY_SCHEMA_NAME = "replica_schedule_schema.json" +_SCHEDULE_API_BODY_SCHEMA_NAME = "transfer_schedule_schema.json" _CORIOLIS_DESTINATION_OPTIONS_SCHEMA_NAME = "destination_options_schema.json" _CORIOLIS_SOURCE_OPTIONS_SCHEMA_NAME = "source_options_schema.json" _CORIOLIS_NETWORK_MAP_SCHEMA_NAME = "network_map_schema.json" diff --git a/coriolis/schemas/disk_sync_resources_info_schema.json b/coriolis/schemas/disk_sync_resources_info_schema.json index 0aa0becbb..4a86c3752 100644 --- a/coriolis/schemas/disk_sync_resources_info_schema.json +++ b/coriolis/schemas/disk_sync_resources_info_schema.json @@ -1,7 +1,7 @@ { "$schema": "http://cloudbase.it/coriolis/schemas/disk_sync_resources_info#", "type": "object", - "description": "Information returned after the 'DEPLOY_REPLICA_TARGET_RESOURCES' task and passed to 'REPLICATE_DISKS', as well as for 'DEPLOY_DISK_COPY_RESOURCES' and 'COPY_DISKS_DATA'. The only required property is the 'volumes_info', and the provider plugins may freely declare and use any other fields.", + "description": "Information returned after the 'DEPLOY_TRANSFER_DISKS' task and passed to 'DEPLOY_TRANSFER_TARGET_RESOURCES' and 'REPLICATE_DISKS'. 
The only required property is the 'volumes_info', and the provider plugins may freely declare and use any other fields.", "properties": { "volumes_info": { "type": "array", diff --git a/coriolis/schemas/replica_schedule_schema.json b/coriolis/schemas/transfer_schedule_schema.json similarity index 93% rename from coriolis/schemas/replica_schedule_schema.json rename to coriolis/schemas/transfer_schedule_schema.json index 89dba79a5..b7c6285dd 100644 --- a/coriolis/schemas/replica_schedule_schema.json +++ b/coriolis/schemas/transfer_schedule_schema.json @@ -1,5 +1,5 @@ { - "$schema": "http://cloudbase.it/coriolis/schemas/replica_schedule_schema#", + "$schema": "http://cloudbase.it/coriolis/schemas/transfer_schedule_schema#", "type": "object", "properties": { "schedule": { diff --git a/coriolis/tasks/factory.py b/coriolis/tasks/factory.py index a3df2e9d6..e638ea451 100644 --- a/coriolis/tasks/factory.py +++ b/coriolis/tasks/factory.py @@ -9,32 +9,12 @@ from coriolis.tasks import replica_tasks _TASKS_MAP = { - constants.TASK_TYPE_DEPLOY_MIGRATION_SOURCE_RESOURCES: - migration_tasks.DeployMigrationSourceResourcesTask, - constants.TASK_TYPE_DEPLOY_MIGRATION_TARGET_RESOURCES: - migration_tasks.DeployMigrationTargetResourcesTask, - constants.TASK_TYPE_DELETE_MIGRATION_SOURCE_RESOURCES: - migration_tasks.DeleteMigrationSourceResourcesTask, - constants.TASK_TYPE_DELETE_MIGRATION_TARGET_RESOURCES: - migration_tasks.DeleteMigrationTargetResourcesTask, - constants.TASK_TYPE_DEPLOY_INSTANCE_RESOURCES: - migration_tasks.DeployInstanceResourcesTask, constants.TASK_TYPE_FINALIZE_INSTANCE_DEPLOYMENT: migration_tasks.FinalizeInstanceDeploymentTask, - constants.TASK_TYPE_CREATE_INSTANCE_DISKS: - migration_tasks.CreateInstanceDisksTask, constants.TASK_TYPE_CLEANUP_FAILED_INSTANCE_DEPLOYMENT: migration_tasks.CleanupFailedInstanceDeploymentTask, - constants.TASK_TYPE_CLEANUP_INSTANCE_TARGET_STORAGE: - migration_tasks.CleanupInstanceTargetStorageTask, - 
constants.TASK_TYPE_CLEANUP_INSTANCE_SOURCE_STORAGE: - migration_tasks.CleanupInstanceSourceStorageTask, constants.TASK_TYPE_GET_OPTIMAL_FLAVOR: migration_tasks.GetOptimalFlavorTask, - constants.TASK_TYPE_VALIDATE_MIGRATION_SOURCE_INPUTS: - migration_tasks.ValidateMigrationSourceInputsTask, - constants.TASK_TYPE_VALIDATE_MIGRATION_DESTINATION_INPUTS: - migration_tasks.ValidateMigrationDestinationInputsTask, constants.TASK_TYPE_DEPLOY_OS_MORPHING_RESOURCES: osmorphing_tasks.DeployOSMorphingResourcesTask, constants.TASK_TYPE_OS_MORPHING: @@ -47,41 +27,37 @@ replica_tasks.ReplicateDisksTask, constants.TASK_TYPE_SHUTDOWN_INSTANCE: replica_tasks.ShutdownInstanceTask, - constants.TASK_TYPE_DEPLOY_REPLICA_DISKS: + constants.TASK_TYPE_DEPLOY_TRANSFER_DISKS: replica_tasks.DeployReplicaDisksTask, - constants.TASK_TYPE_DELETE_REPLICA_SOURCE_DISK_SNAPSHOTS: + constants.TASK_TYPE_DELETE_TRANSFER_SOURCE_DISK_SNAPSHOTS: replica_tasks.DeleteReplicaSourceDiskSnapshotsTask, - constants.TASK_TYPE_DELETE_REPLICA_DISKS: + constants.TASK_TYPE_DELETE_TRANSFER_DISKS: replica_tasks.DeleteReplicaDisksTask, - constants.TASK_TYPE_DEPLOY_REPLICA_TARGET_RESOURCES: + constants.TASK_TYPE_DEPLOY_TRANSFER_TARGET_RESOURCES: replica_tasks.DeployReplicaTargetResourcesTask, - constants.TASK_TYPE_DELETE_REPLICA_TARGET_RESOURCES: + constants.TASK_TYPE_DELETE_TRANSFER_TARGET_RESOURCES: replica_tasks.DeleteReplicaTargetResourcesTask, - constants.TASK_TYPE_DEPLOY_REPLICA_SOURCE_RESOURCES: + constants.TASK_TYPE_DEPLOY_TRANSFER_SOURCE_RESOURCES: replica_tasks.DeployReplicaSourceResourcesTask, - constants.TASK_TYPE_DELETE_REPLICA_SOURCE_RESOURCES: + constants.TASK_TYPE_DELETE_TRANSFER_SOURCE_RESOURCES: replica_tasks.DeleteReplicaSourceResourcesTask, - constants.TASK_TYPE_DEPLOY_REPLICA_INSTANCE_RESOURCES: + constants.TASK_TYPE_DEPLOY_INSTANCE_RESOURCES: replica_tasks.DeployReplicaInstanceResourcesTask, - constants.TASK_TYPE_FINALIZE_REPLICA_INSTANCE_DEPLOYMENT: - 
replica_tasks.FinalizeReplicaInstanceDeploymentTask, - constants.TASK_TYPE_CLEANUP_FAILED_REPLICA_INSTANCE_DEPLOYMENT: - replica_tasks.CleanupFailedReplicaInstanceDeploymentTask, - constants.TASK_TYPE_CREATE_REPLICA_DISK_SNAPSHOTS: + constants.TASK_TYPE_CREATE_TRANSFER_DISK_SNAPSHOTS: replica_tasks.CreateReplicaDiskSnapshotsTask, - constants.TASK_TYPE_DELETE_REPLICA_TARGET_DISK_SNAPSHOTS: + constants.TASK_TYPE_DELETE_TRANSFER_TARGET_DISK_SNAPSHOTS: replica_tasks.DeleteReplicaTargetDiskSnapshotsTask, - constants.TASK_TYPE_RESTORE_REPLICA_DISK_SNAPSHOTS: + constants.TASK_TYPE_RESTORE_TRANSFER_DISK_SNAPSHOTS: replica_tasks.RestoreReplicaDiskSnapshotsTask, - constants.TASK_TYPE_VALIDATE_REPLICA_SOURCE_INPUTS: + constants.TASK_TYPE_VALIDATE_TRANSFER_SOURCE_INPUTS: replica_tasks.ValidateReplicaExecutionSourceInputsTask, - constants.TASK_TYPE_VALIDATE_REPLICA_DESTINATION_INPUTS: + constants.TASK_TYPE_VALIDATE_TRANSFER_DESTINATION_INPUTS: replica_tasks.ValidateReplicaExecutionDestinationInputsTask, - constants.TASK_TYPE_VALIDATE_REPLICA_DEPLOYMENT_INPUTS: + constants.TASK_TYPE_VALIDATE_DEPLOYMENT_INPUTS: replica_tasks.ValidateReplicaDeploymentParametersTask, - constants.TASK_TYPE_UPDATE_SOURCE_REPLICA: + constants.TASK_TYPE_UPDATE_SOURCE_TRANSFER: replica_tasks.UpdateSourceReplicaTask, - constants.TASK_TYPE_UPDATE_DESTINATION_REPLICA: + constants.TASK_TYPE_UPDATE_DESTINATION_TRANSFER: replica_tasks.UpdateDestinationReplicaTask, constants.TASK_TYPE_VALIDATE_SOURCE_MINION_POOL_OPTIONS: minion_pool_tasks.ValidateSourceMinionPoolOptionsTask, diff --git a/coriolis/tasks/osmorphing_tasks.py b/coriolis/tasks/osmorphing_tasks.py index 059698103..3005a5252 100644 --- a/coriolis/tasks/osmorphing_tasks.py +++ b/coriolis/tasks/osmorphing_tasks.py @@ -34,20 +34,20 @@ def get_returned_task_info_properties(cls): def get_required_provider_types(cls): return { constants.PROVIDER_PLATFORM_SOURCE: [ - constants.PROVIDER_TYPE_REPLICA_EXPORT], + constants.PROVIDER_TYPE_TRANSFER_EXPORT], 
constants.PROVIDER_PLATFORM_DESTINATION: [ - constants.PROVIDER_TYPE_REPLICA_IMPORT], + constants.PROVIDER_TYPE_TRANSFER_IMPORT], } def _run(self, ctxt, instance, origin, destination, task_info, event_handler): origin_provider = providers_factory.get_provider( - origin["type"], constants.PROVIDER_TYPE_REPLICA_EXPORT, + origin["type"], constants.PROVIDER_TYPE_TRANSFER_EXPORT, event_handler) destination_provider = providers_factory.get_provider( - destination["type"], constants.PROVIDER_TYPE_REPLICA_IMPORT, + destination["type"], constants.PROVIDER_TYPE_TRANSFER_IMPORT, event_handler) osmorphing_connection_info = base.unmarshal_migr_conn_info( diff --git a/coriolis/tasks/replica_tasks.py b/coriolis/tasks/replica_tasks.py index f2c195dc2..bdc2382ef 100644 --- a/coriolis/tasks/replica_tasks.py +++ b/coriolis/tasks/replica_tasks.py @@ -93,13 +93,13 @@ def get_returned_task_info_properties(cls): def get_required_provider_types(cls): return { constants.PROVIDER_PLATFORM_SOURCE: [ - constants.PROVIDER_TYPE_REPLICA_EXPORT] + constants.PROVIDER_TYPE_TRANSFER_EXPORT] } def _run(self, ctxt, instance, origin, destination, task_info, event_handler): provider = providers_factory.get_provider( - origin["type"], constants.PROVIDER_TYPE_REPLICA_EXPORT, + origin["type"], constants.PROVIDER_TYPE_TRANSFER_EXPORT, event_handler) connection_info = base.get_connection_info(ctxt, origin) @@ -134,13 +134,13 @@ def get_returned_task_info_properties(cls): def get_required_provider_types(cls): return { constants.PROVIDER_PLATFORM_SOURCE: [ - constants.PROVIDER_TYPE_REPLICA_EXPORT] + constants.PROVIDER_TYPE_TRANSFER_EXPORT] } def _run(self, ctxt, instance, origin, destination, task_info, event_handler): provider = providers_factory.get_provider( - origin["type"], constants.PROVIDER_TYPE_REPLICA_EXPORT, + origin["type"], constants.PROVIDER_TYPE_TRANSFER_EXPORT, event_handler) connection_info = base.get_connection_info(ctxt, origin) @@ -175,13 +175,13 @@ def 
get_returned_task_info_properties(cls): def get_required_provider_types(cls): return { constants.PROVIDER_PLATFORM_SOURCE: [ - constants.PROVIDER_TYPE_REPLICA_EXPORT] + constants.PROVIDER_TYPE_TRANSFER_EXPORT] } def _run(self, ctxt, instance, origin, destination, task_info, event_handler): provider = providers_factory.get_provider( - origin["type"], constants.PROVIDER_TYPE_REPLICA_EXPORT, + origin["type"], constants.PROVIDER_TYPE_TRANSFER_EXPORT, event_handler) connection_info = base.get_connection_info(ctxt, origin) export_info = task_info["export_info"] @@ -245,7 +245,7 @@ def get_returned_task_info_properties(cls): def get_required_provider_types(cls): return { constants.PROVIDER_PLATFORM_DESTINATION: [ - constants.PROVIDER_TYPE_REPLICA_IMPORT] + constants.PROVIDER_TYPE_TRANSFER_IMPORT] } def _run(self, ctxt, instance, origin, destination, task_info, @@ -254,7 +254,7 @@ def _run(self, ctxt, instance, origin, destination, task_info, export_info = task_info["export_info"] provider = providers_factory.get_provider( - destination["type"], constants.PROVIDER_TYPE_REPLICA_IMPORT, + destination["type"], constants.PROVIDER_TYPE_TRANSFER_IMPORT, event_handler) connection_info = base.get_connection_info(ctxt, destination) @@ -291,7 +291,7 @@ def get_returned_task_info_properties(cls): def get_required_provider_types(cls): return { constants.PROVIDER_PLATFORM_SOURCE: [ - constants.PROVIDER_TYPE_REPLICA_EXPORT] + constants.PROVIDER_TYPE_TRANSFER_EXPORT] } def _run(self, ctxt, instance, origin, destination, task_info, @@ -305,7 +305,7 @@ def _run(self, ctxt, instance, origin, destination, task_info, return {'volumes_info': []} provider = providers_factory.get_provider( - origin['type'], constants.PROVIDER_TYPE_REPLICA_EXPORT, + origin['type'], constants.PROVIDER_TYPE_TRANSFER_EXPORT, event_handler) connection_info = base.get_connection_info(ctxt, origin) source_environment = task_info['source_environment'] @@ -337,7 +337,7 @@ def get_returned_task_info_properties(cls): def 
get_required_provider_types(cls): return { constants.PROVIDER_PLATFORM_DESTINATION: [ - constants.PROVIDER_TYPE_REPLICA_IMPORT] + constants.PROVIDER_TYPE_TRANSFER_IMPORT] } def _run(self, ctxt, instance, origin, destination, task_info, @@ -351,7 +351,7 @@ def _run(self, ctxt, instance, origin, destination, task_info, return {'volumes_info': []} provider = providers_factory.get_provider( - destination["type"], constants.PROVIDER_TYPE_REPLICA_IMPORT, + destination["type"], constants.PROVIDER_TYPE_TRANSFER_IMPORT, event_handler) connection_info = base.get_connection_info(ctxt, destination) @@ -389,13 +389,13 @@ def get_returned_task_info_properties(cls): def get_required_provider_types(cls): return { constants.PROVIDER_PLATFORM_SOURCE: [ - constants.PROVIDER_TYPE_REPLICA_EXPORT] + constants.PROVIDER_TYPE_TRANSFER_EXPORT] } def _run(self, ctxt, instance, origin, destination, task_info, event_handler): provider = providers_factory.get_provider( - origin["type"], constants.PROVIDER_TYPE_REPLICA_EXPORT, + origin["type"], constants.PROVIDER_TYPE_TRANSFER_EXPORT, event_handler) connection_info = base.get_connection_info(ctxt, origin) @@ -460,13 +460,13 @@ def get_returned_task_info_properties(cls): def get_required_provider_types(cls): return { constants.PROVIDER_PLATFORM_SOURCE: [ - constants.PROVIDER_TYPE_REPLICA_EXPORT] + constants.PROVIDER_TYPE_TRANSFER_EXPORT] } def _run(self, ctxt, instance, origin, destination, task_info, event_handler): provider = providers_factory.get_provider( - origin["type"], constants.PROVIDER_TYPE_REPLICA_EXPORT, + origin["type"], constants.PROVIDER_TYPE_TRANSFER_EXPORT, event_handler) connection_info = base.get_connection_info(ctxt, origin) @@ -504,7 +504,7 @@ def get_returned_task_info_properties(cls): def get_required_provider_types(cls): return { constants.PROVIDER_PLATFORM_DESTINATION: [ - constants.PROVIDER_TYPE_REPLICA_IMPORT] + constants.PROVIDER_TYPE_TRANSFER_IMPORT] } def _validate_connection_info(self, migr_connection_info): @@ 
-535,7 +535,7 @@ def _run(self, ctxt, instance, origin, destination, task_info, export_info = task_info['export_info'] provider = providers_factory.get_provider( - destination["type"], constants.PROVIDER_TYPE_REPLICA_IMPORT, + destination["type"], constants.PROVIDER_TYPE_TRANSFER_IMPORT, event_handler) connection_info = base.get_connection_info(ctxt, destination) @@ -603,13 +603,13 @@ def get_returned_task_info_properties(cls): def get_required_provider_types(cls): return { constants.PROVIDER_PLATFORM_DESTINATION: [ - constants.PROVIDER_TYPE_REPLICA_IMPORT] + constants.PROVIDER_TYPE_TRANSFER_IMPORT] } def _run(self, ctxt, instance, origin, destination, task_info, event_handler): provider = providers_factory.get_provider( - destination["type"], constants.PROVIDER_TYPE_REPLICA_IMPORT, + destination["type"], constants.PROVIDER_TYPE_TRANSFER_IMPORT, event_handler) connection_info = base.get_connection_info(ctxt, destination) @@ -644,7 +644,7 @@ def get_returned_task_info_properties(cls): def get_required_provider_types(cls): return { constants.PROVIDER_PLATFORM_DESTINATION: [ - constants.PROVIDER_TYPE_REPLICA_IMPORT] + constants.PROVIDER_TYPE_TRANSFER_IMPORT] } def _run(self, ctxt, instance, origin, destination, task_info, @@ -653,7 +653,7 @@ def _run(self, ctxt, instance, origin, destination, task_info, export_info = task_info["export_info"] provider = providers_factory.get_provider( - destination["type"], constants.PROVIDER_TYPE_REPLICA_IMPORT, + destination["type"], constants.PROVIDER_TYPE_TRANSFER_IMPORT, event_handler) connection_info = base.get_connection_info(ctxt, destination) @@ -688,13 +688,13 @@ def get_returned_task_info_properties(cls): def get_required_provider_types(cls): return { constants.PROVIDER_PLATFORM_DESTINATION: [ - constants.PROVIDER_TYPE_REPLICA_IMPORT] + constants.PROVIDER_TYPE_TRANSFER_IMPORT] } def _run(self, ctxt, instance, origin, destination, task_info, event_handler): provider = providers_factory.get_provider( - destination["type"], 
constants.PROVIDER_TYPE_REPLICA_IMPORT, + destination["type"], constants.PROVIDER_TYPE_TRANSFER_IMPORT, event_handler) connection_info = base.get_connection_info(ctxt, destination) target_environment = task_info["target_environment"] @@ -730,13 +730,13 @@ def get_returned_task_info_properties(cls): def get_required_provider_types(cls): return { constants.PROVIDER_PLATFORM_DESTINATION: [ - constants.PROVIDER_TYPE_REPLICA_IMPORT] + constants.PROVIDER_TYPE_TRANSFER_IMPORT] } def _run(self, ctxt, instance, origin, destination, task_info, event_handler): provider = providers_factory.get_provider( - destination["type"], constants.PROVIDER_TYPE_REPLICA_IMPORT, + destination["type"], constants.PROVIDER_TYPE_TRANSFER_IMPORT, event_handler) connection_info = base.get_connection_info(ctxt, destination) target_environment = task_info["target_environment"] @@ -768,13 +768,13 @@ def get_returned_task_info_properties(cls): def get_required_provider_types(cls): return { constants.PROVIDER_PLATFORM_DESTINATION: [ - constants.PROVIDER_TYPE_REPLICA_IMPORT] + constants.PROVIDER_TYPE_TRANSFER_IMPORT] } def _run(self, ctxt, instance, origin, destination, task_info, event_handler): provider = providers_factory.get_provider( - destination["type"], constants.PROVIDER_TYPE_REPLICA_IMPORT, + destination["type"], constants.PROVIDER_TYPE_TRANSFER_IMPORT, event_handler) connection_info = base.get_connection_info(ctxt, destination) export_info = task_info['export_info'] @@ -812,14 +812,14 @@ def get_returned_task_info_properties(cls): def get_required_provider_types(cls): return { constants.PROVIDER_PLATFORM_DESTINATION: [ - constants.PROVIDER_TYPE_REPLICA_IMPORT] + constants.PROVIDER_TYPE_TRANSFER_IMPORT] } def _run(self, ctxt, instance, origin, destination, task_info, event_handler): export_info = task_info['export_info'] provider = providers_factory.get_provider( - destination["type"], constants.PROVIDER_TYPE_REPLICA_IMPORT, + destination["type"], constants.PROVIDER_TYPE_TRANSFER_IMPORT, 
event_handler) connection_info = base.get_connection_info(ctxt, destination) @@ -856,13 +856,13 @@ def get_returned_task_info_properties(cls): def get_required_provider_types(cls): return { constants.PROVIDER_PLATFORM_DESTINATION: [ - constants.PROVIDER_TYPE_REPLICA_IMPORT] + constants.PROVIDER_TYPE_TRANSFER_IMPORT] } def _run(self, ctxt, instance, origin, destination, task_info, event_handler): provider = providers_factory.get_provider( - destination["type"], constants.PROVIDER_TYPE_REPLICA_IMPORT, + destination["type"], constants.PROVIDER_TYPE_TRANSFER_IMPORT, event_handler) connection_info = base.get_connection_info(ctxt, destination) export_info = task_info['export_info'] @@ -900,7 +900,7 @@ def get_returned_task_info_properties(cls): def get_required_provider_types(cls): return { constants.PROVIDER_PLATFORM_SOURCE: [ - constants.PROVIDER_TYPE_VALIDATE_REPLICA_EXPORT] + constants.PROVIDER_TYPE_VALIDATE_TRANSFER_EXPORT] } def _run(self, ctxt, instance, origin, destination, task_info, @@ -908,7 +908,7 @@ def _run(self, ctxt, instance, origin, destination, task_info, event_manager = events.EventManager(event_handler) origin_type = origin["type"] source_provider = providers_factory.get_provider( - origin_type, constants.PROVIDER_TYPE_VALIDATE_REPLICA_EXPORT, + origin_type, constants.PROVIDER_TYPE_VALIDATE_TRANSFER_EXPORT, event_handler, raise_if_not_found=False) origin_connection_info = base.get_connection_info(ctxt, origin) if not source_provider: @@ -941,7 +941,7 @@ def get_returned_task_info_properties(cls): def get_required_provider_types(cls): return { constants.PROVIDER_PLATFORM_DESTINATION: [ - constants.PROVIDER_TYPE_VALIDATE_REPLICA_IMPORT] + constants.PROVIDER_TYPE_VALIDATE_TRANSFER_IMPORT] } def _validate_provider_replica_import_input( @@ -960,7 +960,7 @@ def _run(self, ctxt, instance, origin, destination, task_info, ctxt, destination) destination_provider = providers_factory.get_provider( destination_type, - 
constants.PROVIDER_TYPE_VALIDATE_REPLICA_IMPORT, event_handler, + constants.PROVIDER_TYPE_VALIDATE_TRANSFER_IMPORT, event_handler, raise_if_not_found=False) if not destination_provider: event_manager.progress_update( @@ -1001,7 +1001,7 @@ def get_returned_task_info_properties(cls): def get_required_provider_types(cls): return { constants.PROVIDER_PLATFORM_DESTINATION: [ - constants.PROVIDER_TYPE_VALIDATE_REPLICA_IMPORT] + constants.PROVIDER_TYPE_VALIDATE_TRANSFER_IMPORT] } def _run(self, ctxt, instance, origin, destination, task_info, @@ -1018,7 +1018,7 @@ def _run(self, ctxt, instance, origin, destination, task_info, # validate destination params: destination_provider = providers_factory.get_provider( destination_type, - constants.PROVIDER_TYPE_VALIDATE_REPLICA_IMPORT, event_handler, + constants.PROVIDER_TYPE_VALIDATE_TRANSFER_IMPORT, event_handler, raise_if_not_found=False) if not destination_provider: event_manager.progress_update( @@ -1054,7 +1054,7 @@ def get_returned_task_info_properties(cls): def get_required_provider_types(cls): return { constants.PROVIDER_PLATFORM_SOURCE: [ - constants.PROVIDER_TYPE_SOURCE_REPLICA_UPDATE] + constants.PROVIDER_TYPE_SOURCE_TRANSFER_UPDATE] } def _run(self, ctxt, instance, origin, destination, task_info, @@ -1075,7 +1075,7 @@ def _run(self, ctxt, instance, origin, destination, task_info, 'source_environment': old_source_env} source_provider = providers_factory.get_provider( - origin["type"], constants.PROVIDER_TYPE_SOURCE_REPLICA_UPDATE, + origin["type"], constants.PROVIDER_TYPE_SOURCE_TRANSFER_UPDATE, event_handler, raise_if_not_found=False) if not source_provider: raise exception.InvalidActionTasksExecutionState( @@ -1122,7 +1122,7 @@ def get_returned_task_info_properties(cls): def get_required_provider_types(cls): return { constants.PROVIDER_PLATFORM_DESTINATION: [ - constants.PROVIDER_TYPE_DESTINATION_REPLICA_UPDATE] + constants.PROVIDER_TYPE_DESTINATION_TRANSFER_UPDATE] } def _run(self, ctxt, instance, origin, 
destination, task_info, @@ -1144,7 +1144,7 @@ def _run(self, ctxt, instance, origin, destination, task_info, destination_provider = providers_factory.get_provider( destination["type"], - constants.PROVIDER_TYPE_DESTINATION_REPLICA_UPDATE, + constants.PROVIDER_TYPE_DESTINATION_TRANSFER_UPDATE, event_handler, raise_if_not_found=False) if not destination_provider: raise exception.InvalidActionTasksExecutionState( diff --git a/coriolis/tests/api/v1/__init__py b/coriolis/tests/api/v1/__init__py deleted file mode 100644 index e69de29bb..000000000 diff --git a/coriolis/tests/api/v1/data/migration_create.yml b/coriolis/tests/api/v1/data/migration_create.yml deleted file mode 100644 index dd220e230..000000000 --- a/coriolis/tests/api/v1/data/migration_create.yml +++ /dev/null @@ -1,28 +0,0 @@ - -- config: - migration: - user_scripts: - mock_user_scripts: null - instances: ["mock_instance1", "mock_instance2"] - replica_id: 'mock_replica_id' - clone_disks: True - force: False - skip_os_morphing: False - instance_osmorphing_minion_pool_mappings: - mock_mapping: "mock_value" - expected_api_method: "deploy_replica_instances" - validation_expected: False - -- config: - migration: - user_scripts: - mock_user_scripts: null - instances: ["mock_instance1", "mock_instance2"] - replica_id: null - clone_disks: True - force: False - skip_os_morphing: False - instance_osmorphing_minion_pool_mappings: - mock_mapping: "mock_value" - expected_api_method: "migrate_instances" - validation_expected: True diff --git a/coriolis/tests/api/v1/data/migration_validate_input.yml b/coriolis/tests/api/v1/data/migration_validate_input.yml deleted file mode 100644 index d8f7d680f..000000000 --- a/coriolis/tests/api/v1/data/migration_validate_input.yml +++ /dev/null @@ -1,48 +0,0 @@ - -- config: - migration: - origin_endpoint_id: "mock_origin_endpoint_id" - destination_endpoint_id: "mock_destination_endpoint_id" - origin_minion_pool_id: "mock_origin_minion_pool_id" - destination_minion_pool_id: 
"mock_destination_minion_pool_id" - instance_osmorphing_minion_pool_mappings: - mock_instance_1: "mock_pool" - mock_instance_2: "mock_pool" - instances: ['mock_instance_1', 'mock_instance_2'] - notes: "mock_notes" - skip_os_morphing: false - shutdown_instances: false - replication_count: 2 - source_environment: {} - network_map: {} - destination_environment: - network_map: {} - storage_mappings: {} - storage_mappings: {} - raises_value_error: false - -- config: - migration: - origin_endpoint_id: "mock_origin_endpoint_id" - destination_endpoint_id: "mock_destination_endpoint_id" - origin_minion_pool_id: "mock_origin_minion_pool_id" - destination_minion_pool_id: "mock_destination_minion_pool_id" - instance_osmorphing_minion_pool_mappings: - mock_instance_1: "mock_pool" - mock_instance_2: "mock_pool" - instances: ['mock_instance_1', 'mock_instance_3'] - raises_value_error: true - - -- config: - migration: - origin_endpoint_id: "mock_origin_endpoint_id" - destination_endpoint_id: "mock_destination_endpoint_id" - origin_minion_pool_id: "mock_origin_minion_pool_id" - destination_minion_pool_id: "mock_destination_minion_pool_id" - instance_osmorphing_minion_pool_mappings: - mock_instance_1: "mock_pool" - mock_instance_2: "mock_pool" - instances: ['mock_instance_1', 'mock_instance_2'] - replication_count: 13 - raises_value_error: true \ No newline at end of file diff --git a/coriolis/tests/api/v1/data/replica_task_execution_actions_cancel.yml b/coriolis/tests/api/v1/data/transfer_task_execution_actions_cancel.yml similarity index 100% rename from coriolis/tests/api/v1/data/replica_task_execution_actions_cancel.yml rename to coriolis/tests/api/v1/data/transfer_task_execution_actions_cancel.yml diff --git a/coriolis/tests/api/v1/data/replicas_get_merged_replica_values.yml b/coriolis/tests/api/v1/data/transfers_get_merged_transfer_values.yml similarity index 98% rename from coriolis/tests/api/v1/data/replicas_get_merged_replica_values.yml rename to 
coriolis/tests/api/v1/data/transfers_get_merged_transfer_values.yml index 83d5a4a52..83f2abe21 100644 --- a/coriolis/tests/api/v1/data/replicas_get_merged_replica_values.yml +++ b/coriolis/tests/api/v1/data/transfers_get_merged_transfer_values.yml @@ -1,6 +1,6 @@ - config: - replica: + transfer: origin_endpoint_id: "mock_origin_endpoint_id" destination_endpoint_id: "mock_destination_endpoint_id" source_environment: {'mock_source_key': 'mock_source_value'} @@ -14,7 +14,7 @@ instance_osmorphing_minion_pool_mappings: mock_instance_1: "mock_pool_1" mock_instance_2: "mock_pool_2" - updated_values: + updated_values: source_environment: {'mock_updated_source_key': 'mock_updated_source_value'} destination_environment: storage_mappings: {'mock_updated_destination_key': 'mock_updated_destination_value'} @@ -47,7 +47,7 @@ mock_instance_2: "mock_updated_pool_2" - config: - replica: + transfer: origin_endpoint_id: "mock_origin_endpoint_id" destination_endpoint_id: "mock_destination_endpoint_id" source_environment: {'mock_source_key': 'mock_source_value'} @@ -75,7 +75,7 @@ notes: "mock_notes" - config: - replica: + transfer: origin_endpoint_id: "mock_origin_endpoint_id" destination_endpoint_id: "mock_destination_endpoint_id" user_scripts: {'mock_scripts_key': 'mock_scripts_value'} diff --git a/coriolis/tests/api/v1/data/replicas_update_storage_mappings.yml b/coriolis/tests/api/v1/data/transfers_update_storage_mappings.yml similarity index 100% rename from coriolis/tests/api/v1/data/replicas_update_storage_mappings.yml rename to coriolis/tests/api/v1/data/transfers_update_storage_mappings.yml diff --git a/coriolis/tests/api/v1/data/replicas_validate_create_body.yml b/coriolis/tests/api/v1/data/transfers_validate_create_body.yml similarity index 96% rename from coriolis/tests/api/v1/data/replicas_validate_create_body.yml rename to coriolis/tests/api/v1/data/transfers_validate_create_body.yml index b4c9059b4..9e2291865 100644 --- 
a/coriolis/tests/api/v1/data/replicas_validate_create_body.yml +++ b/coriolis/tests/api/v1/data/transfers_validate_create_body.yml @@ -1,7 +1,7 @@ - config: body: - replica: + transfer: origin_endpoint_id: "mock_origin_endpoint_id" destination_endpoint_id: "mock_destination_endpoint_id" source_environment: "mock_source_environment" @@ -19,6 +19,7 @@ storage_mappings: "mock_storage_mappings" exception_raised: False expected_result: + - replica - mock_origin_endpoint_id - mock_destination_endpoint_id - mock_source_environment @@ -35,7 +36,7 @@ - config: body: - replica: + transfer: origin_endpoint_id: "mock_origin_endpoint_id" destination_endpoint_id: "mock_destination_endpoint_id" source_environment: "mock_source_environment" @@ -52,5 +53,5 @@ user_scripts: "mock_user_scripts" storage_mappings: "mock_storage_mappings" exception_raised: "One or more instance OSMorphing pool mappings were" - expected_result: + expected_result: diff --git a/coriolis/tests/api/v1/data/replicas_validate_update_body.yml b/coriolis/tests/api/v1/data/transfers_validate_update_body.yml similarity index 98% rename from coriolis/tests/api/v1/data/replicas_validate_update_body.yml rename to coriolis/tests/api/v1/data/transfers_validate_update_body.yml index 5b763f1ca..2448e5171 100644 --- a/coriolis/tests/api/v1/data/replicas_validate_update_body.yml +++ b/coriolis/tests/api/v1/data/transfers_validate_update_body.yml @@ -1,7 +1,7 @@ - config: body: - replica: + transfer: source_environment: "mock_source_environment" destination_environment: "mock_destination_environment" storage_mappings: {'mock_updated_destination_key': 'mock_updated_destination_value'} @@ -13,7 +13,7 @@ instance_osmorphing_minion_pool_mappings: mock_instance_1: "mock_updated_pool_1" mock_instance_2: "mock_updated_pool_2" - replica: + transfer: destination_endpoint_id: "mock_destination_endpoint_id" origin_endpoint_id: "mock_origin_endpoint_id" instances: "mock_instances" diff --git 
a/coriolis/tests/api/v1/data/replicas_validate_update_body_raises.yml b/coriolis/tests/api/v1/data/transfers_validate_update_body_raises.yml similarity index 80% rename from coriolis/tests/api/v1/data/replicas_validate_update_body_raises.yml rename to coriolis/tests/api/v1/data/transfers_validate_update_body_raises.yml index 37dfd585f..bd246ffb5 100644 --- a/coriolis/tests/api/v1/data/replicas_validate_update_body_raises.yml +++ b/coriolis/tests/api/v1/data/transfers_validate_update_body_raises.yml @@ -1,13 +1,13 @@ - body: - replica: + transfer: origin_endpoint_id: "mock_origin_endpoint_id" - body: - replica: + transfer: destination_endpoint_id: "mock_destination_endpoint_id" - body: - replica: + transfer: instances: "instances" diff --git a/coriolis/tests/api/v1/test_migration_actions.py b/coriolis/tests/api/v1/test_migration_actions.py deleted file mode 100644 index 563bd6af8..000000000 --- a/coriolis/tests/api/v1/test_migration_actions.py +++ /dev/null @@ -1,114 +0,0 @@ -# Copyright 2023 Cloudbase Solutions Srl -# All Rights Reserved. 
- -from unittest import mock - -from webob import exc - -from coriolis.api.v1 import migration_actions -from coriolis import exception -from coriolis.migrations import api -from coriolis.tests import test_base -from coriolis.tests import testutils - - -class MigrationActionsControllerTestCase(test_base.CoriolisBaseTestCase): - """Test suite for the Coriolis Migration Actions v1 API""" - - def setUp(self): - super(MigrationActionsControllerTestCase, self).setUp() - self.migration_actions = migration_actions.MigrationActionsController() - - @mock.patch.object(api.API, 'cancel') - def test__cancel( - self, - mock_cancel, - ): - mock_req = mock.Mock() - mock_context = mock.Mock() - mock_req.environ = {'coriolis.context': mock_context} - id = mock.sentinel.id - mock_body = { - 'cancel': { - 'force': False - } - } - - self.assertRaises( - exc.HTTPNoContent, - testutils.get_wrapped_function(self.migration_actions._cancel), - mock_req, - id, - mock_body - ) - - mock_context.can.assert_called_once_with("migration:migrations:cancel") - mock_cancel.assert_called_once_with(mock_context, id, False) - - @mock.patch.object(api.API, 'cancel') - def test__cancel_empty( - self, - mock_cancel, - ): - mock_req = mock.Mock() - mock_context = mock.Mock() - mock_req.environ = {'coriolis.context': mock_context} - id = mock.sentinel.id - mock_body = {'cancel': {}} - - self.assertRaises( - exc.HTTPNoContent, - testutils.get_wrapped_function(self.migration_actions._cancel), - mock_req, - id, - mock_body - ) - - mock_context.can.assert_called_once_with("migration:migrations:cancel") - mock_cancel.assert_called_once_with(mock_context, id, False) - - @mock.patch.object(api.API, 'cancel') - def test__cancel_not_found( - self, - mock_cancel, - ): - mock_req = mock.Mock() - mock_context = mock.Mock() - mock_req.environ = {'coriolis.context': mock_context} - id = mock.sentinel.id - mock_body = {'cancel': {}} - mock_cancel.side_effect = exception.NotFound() - - self.assertRaises( - exc.HTTPNotFound, 
- testutils.get_wrapped_function(self.migration_actions._cancel), - mock_req, - id, - mock_body - ) - - mock_context.can.assert_called_once_with("migration:migrations:cancel") - mock_cancel.assert_called_once_with(mock_context, id, False) - - @mock.patch.object(api.API, 'cancel') - def test__cancel_invalid_parameter_value( - self, - mock_cancel, - ): - mock_req = mock.Mock() - mock_context = mock.Mock() - mock_req.environ = {'coriolis.context': mock_context} - id = mock.sentinel.id - mock_body = {'cancel': {}} - mock_cancel.side_effect = exception.InvalidParameterValue("err") - - self.assertRaises( - exc.HTTPNotFound, - testutils.get_wrapped_function(self.migration_actions._cancel), - mock_req, - id, - mock_body - ) - - mock_context.can.assert_called_once_with("migration:migrations:cancel") - mock_cancel.assert_called_once_with(mock_context, id, False) diff --git a/coriolis/tests/api/v1/test_migrations.py b/coriolis/tests/api/v1/test_migrations.py deleted file mode 100644 index 672327185..000000000 --- a/coriolis/tests/api/v1/test_migrations.py +++ /dev/null @@ -1,260 +0,0 @@ -# Copyright 2023 Cloudbase Solutions Srl -# All Rights Reserved. 
- -from unittest import mock -from webob import exc - -import ddt - -from coriolis.api.v1 import migrations -from coriolis.api.v1 import utils as api_utils -from coriolis.api.v1.views import migration_view -from coriolis.endpoints import api as endpoints_api -from coriolis import exception -from coriolis.migrations import api -from coriolis.tests import test_base -from coriolis.tests import testutils - - -@ddt.ddt -class MigrationControllerTestCase(test_base.CoriolisBaseTestCase): - """Test suite for the Coriolis Migrations v1 API""" - - def setUp(self): - super(MigrationControllerTestCase, self).setUp() - self.migrations = migrations.MigrationController() - - @mock.patch.object(migration_view, 'single') - @mock.patch.object(api.API, 'get_migration') - @mock.patch('coriolis.api.v1.migrations.CONF') - def test_show( - self, - mock_conf, - mock_get_migration, - mock_single - ): - mock_req = mock.Mock() - mock_context = mock.Mock() - mock_req.environ = {'coriolis.context': mock_context} - id = mock.sentinel.id - mock_conf.api.include_task_info_in_migrations_api = False - - result = self.migrations.show(mock_req, id) - - self.assertEqual( - mock_single.return_value, - result - ) - - mock_context.can.assert_called_once_with("migration:migrations:show") - mock_get_migration.assert_called_once_with( - mock_context, id, include_task_info=False - ) - mock_single.assert_called_once_with(mock_get_migration.return_value) - - @mock.patch.object(api.API, 'get_migration') - @mock.patch('coriolis.api.v1.migrations.CONF') - def test_show_no_migration( - self, - mock_conf, - mock_get_migration - ): - mock_req = mock.Mock() - mock_context = mock.Mock() - mock_req.environ = {'coriolis.context': mock_context} - id = mock.sentinel.id - mock_conf.api.include_task_info_in_migrations_api = False - mock_get_migration.return_value = None - - self.assertRaises( - exc.HTTPNotFound, - self.migrations.show, - mock_req, - id - ) - - 
mock_context.can.assert_called_once_with("migration:migrations:show") - mock_get_migration.assert_called_once_with( - mock_context, id, include_task_info=False - ) - - @mock.patch.object(migration_view, 'collection') - @mock.patch.object(api.API, 'get_migrations') - @mock.patch.object(api_utils, '_get_show_deleted') - @mock.patch('coriolis.api.v1.migrations.CONF') - def test__list( - self, - mock_conf, - mock__get_show_deleted, - mock_get_migrations, - mock_collection - ): - mock_req = mock.Mock() - mock_context = mock.Mock() - mock_req.environ = {'coriolis.context': mock_context} - mock_conf.api.include_task_info_in_migrations_api = False - - result = self.migrations._list(mock_req) - - self.assertEqual( - mock_collection.return_value, - result - ) - self.assertEqual( - mock_context.show_deleted, - mock__get_show_deleted.return_value - ) - - mock__get_show_deleted.assert_called_once_with( - mock_req.GET.get.return_value) - mock_context.can.assert_called_once_with("migration:migrations:list") - mock_get_migrations.assert_called_once_with( - mock_context, - include_tasks=False, - include_task_info=False - ) - - @mock.patch.object(api_utils, 'validate_storage_mappings') - @mock.patch.object(endpoints_api.API, 'validate_target_environment') - @mock.patch.object(api_utils, 'validate_network_map') - @mock.patch.object(endpoints_api.API, 'validate_source_environment') - @mock.patch.object(api_utils, 'validate_instances_list_for_transfer') - @ddt.file_data('data/migration_validate_input.yml') - @ddt.unpack - def test__validate_migration_input( - self, - mock_validate_instances_list_for_transfer, - mock_validate_source_environment, - mock_validate_network_map, - mock_validate_target_environment, - mock_validate_storage_mappings, - config, - raises_value_error, - ): - mock_context = mock.Mock() - mock_validate_instances_list_for_transfer.return_value = \ - config['migration']['instances'] - - if raises_value_error: - self.assertRaises( - ValueError, - 
testutils.get_wrapped_function( - self.migrations._validate_migration_input), - self.migrations, - context=mock_context, - body=config - ) - mock_validate_instances_list_for_transfer.assert_called_once() - else: - testutils.get_wrapped_function( - self.migrations._validate_migration_input)( - self.migrations, - context=mock_context, # type: ignore - body=config, # type: ignore - ) - mock_validate_source_environment.assert_called_once_with( - mock_context, - config['migration']['origin_endpoint_id'], - config['migration']['source_environment'] - ) - mock_validate_network_map.assert_called_once_with( - config['migration']['network_map'] - ) - mock_validate_target_environment.assert_called_once_with( - mock_context, - config['migration']['destination_endpoint_id'], - config['migration']['destination_environment'] - ) - mock_validate_storage_mappings.assert_called_once_with( - config['migration']['storage_mappings'] - ) - mock_validate_instances_list_for_transfer.assert_called_once_with( - config['migration']['instances'], - ) - - @mock.patch.object(migration_view, 'single') - @mock.patch.object(migrations.MigrationController, - '_validate_migration_input') - @mock.patch.object(api_utils, 'normalize_user_scripts') - @mock.patch.object(api_utils, 'validate_user_scripts') - @ddt.file_data('data/migration_create.yml') - @ddt.unpack - def test_create( - self, - mock_validate_user_scripts, - mock_normalize_user_scripts, - mock__validate_migration_input, - mock_single, - config, - expected_api_method, - validation_expected, - ): - with mock.patch.object(api.API, - expected_api_method) as mock_api_method: - mock_req = mock.Mock() - mock_context = mock.Mock() - mock_req.environ = {'coriolis.context': mock_context} - mock__validate_migration_input.return_value = \ - (mock.sentinel.value,) * 14 - - result = self.migrations.create(mock_req, config) - - self.assertEqual( - mock_single.return_value, - result - ) - - mock_context.can.assert_called_once_with( - 
"migration:migrations:create") - mock_validate_user_scripts.assert_called_once_with( - config['migration']['user_scripts']) - mock_normalize_user_scripts.assert_called_once_with( - config['migration']['user_scripts'], - config['migration']['instances'] - ) - if validation_expected: - mock__validate_migration_input.assert_called_once_with( - mock_context, config) - mock_api_method.assert_called_once() - mock_single.assert_called_once_with(mock_api_method.return_value) - - @mock.patch.object(api.API, 'delete') - def test_delete( - self, - mock_delete - ): - mock_req = mock.Mock() - mock_context = mock.Mock() - mock_req.environ = {'coriolis.context': mock_context} - id = mock.sentinel.id - - self.assertRaises( - exc.HTTPNoContent, - self.migrations.delete, - mock_req, - id - ) - - mock_context.can.assert_called_once_with("migration:migrations:delete") - mock_delete.assert_called_once_with(mock_context, id) - - @mock.patch.object(api.API, 'delete') - def test_delete_not_found( - self, - mock_delete - ): - mock_req = mock.Mock() - mock_context = mock.Mock() - mock_req.environ = {'coriolis.context': mock_context} - id = mock.sentinel.id - mock_delete.side_effect = exception.NotFound() - - self.assertRaises( - exc.HTTPNotFound, - self.migrations.delete, - mock_req, - id - ) - - mock_context.can.assert_called_once_with("migration:migrations:delete") - mock_delete.assert_called_once_with(mock_context, id) diff --git a/coriolis/tests/api/v1/test_router.py b/coriolis/tests/api/v1/test_router.py index 29d13a190..3dc145496 100644 --- a/coriolis/tests/api/v1/test_router.py +++ b/coriolis/tests/api/v1/test_router.py @@ -3,6 +3,8 @@ from unittest import mock +from coriolis.api.v1 import deployment_actions +from coriolis.api.v1 import deployments from coriolis.api.v1 import diagnostics from coriolis.api.v1 import endpoint_actions from coriolis.api.v1 import endpoint_destination_minion_pool_options @@ -13,20 +15,18 @@ from coriolis.api.v1 import endpoint_source_options from 
coriolis.api.v1 import endpoint_storage from coriolis.api.v1 import endpoints -from coriolis.api.v1 import migration_actions -from coriolis.api.v1 import migrations from coriolis.api.v1 import minion_pool_actions from coriolis.api.v1 import minion_pools from coriolis.api.v1 import provider_schemas from coriolis.api.v1 import providers from coriolis.api.v1 import regions -from coriolis.api.v1 import replica_actions -from coriolis.api.v1 import replica_schedules -from coriolis.api.v1 import replica_tasks_execution_actions -from coriolis.api.v1 import replica_tasks_executions -from coriolis.api.v1 import replicas from coriolis.api.v1 import router from coriolis.api.v1 import services +from coriolis.api.v1 import transfer_actions +from coriolis.api.v1 import transfer_schedules +from coriolis.api.v1 import transfer_tasks_execution_actions +from coriolis.api.v1 import transfer_tasks_executions +from coriolis.api.v1 import transfers from coriolis.tests import test_base @@ -37,14 +37,14 @@ def setUp(self): super(APIRouterTestCase, self).setUp() self.router = router.APIRouter() + @mock.patch.object(deployments, 'create_resource') + @mock.patch.object(deployment_actions, 'create_resource') @mock.patch.object(diagnostics, 'create_resource') - @mock.patch.object(replica_schedules, 'create_resource') - @mock.patch.object(replica_tasks_execution_actions, 'create_resource') - @mock.patch.object(replica_tasks_executions, 'create_resource') - @mock.patch.object(replica_actions, 'create_resource') - @mock.patch.object(replicas, 'create_resource') - @mock.patch.object(migration_actions, 'create_resource') - @mock.patch.object(migrations, 'create_resource') + @mock.patch.object(transfer_schedules, 'create_resource') + @mock.patch.object(transfer_tasks_execution_actions, 'create_resource') + @mock.patch.object(transfer_tasks_executions, 'create_resource') + @mock.patch.object(transfer_actions, 'create_resource') + @mock.patch.object(transfers, 'create_resource') 
@mock.patch.object(provider_schemas, 'create_resource') @mock.patch.object(endpoint_source_options, 'create_resource') @mock.patch.object(endpoint_destination_options, 'create_resource') @@ -78,14 +78,14 @@ def test_setup_routes( mock_endpoint_destination_options_create_resource, mock_endpoint_source_options_create_resource, mock_provider_schemas_create_resource, - mock_migrations_create_resource, - mock_migration_actions_create_resource, - mock_replicas_create_resource, - mock_replica_actions_create_resource, - mock_replica_tasks_executions_create_resource, - mock_replica_tasks_execution_actions_create_resource, - mock_replica_schedules_create_resource, + mock_transfers_create_resource, + mock_transfer_actions_create_resource, + mock_transfer_tasks_executions_create_resource, + mock_transfer_tasks_execution_actions_create_resource, + mock_transfer_schedules_create_resource, mock_diagnostics_create_resource, + mock_deployment_actions_create_resource, + mock_deployments_create_resource ): ext_mgr = mock.sentinel.ext_mgr mapper = mock.Mock() @@ -161,30 +161,24 @@ def test_setup_routes( controller=mock_provider_schemas_create_resource.return_value, ), mock.call( - 'migration', 'migrations', - controller=mock_migrations_create_resource.return_value, - collection={'detail': 'GET'}, - member={'action': 'POST'} - ), - mock.call( - 'replica', 'replicas', - controller=mock_replicas_create_resource.return_value, + 'transfer', 'transfers', + controller=mock_transfers_create_resource.return_value, collection={'detail': 'GET'}, member={'action': 'POST'} ), mock.call( 'execution', - 'replicas/{replica_id}/executions', + 'transfers/{transfer_id}/executions', controller= - mock_replica_tasks_executions_create_resource.return_value, + mock_transfer_tasks_executions_create_resource.return_value, collection={'detail': 'GET'}, member={'action': 'POST'} ), mock.call( - 'replica_schedule', - 'replicas/{replica_id}/schedules', + 'transfer_schedule', + 'transfers/{transfer_id}/schedules', 
controller= - mock_replica_schedules_create_resource.return_value, + mock_transfer_schedules_create_resource.return_value, collection={'index': 'GET'}, member={'action': 'POST'} ), @@ -192,6 +186,12 @@ def test_setup_routes( 'diagnostics', 'diagnostics', controller=mock_diagnostics_create_resource.return_value, ), + mock.call( + 'deployment', 'deployments', + controller=mock_deployments_create_resource.return_value, + collection={'detail': 'GET'}, + member={'action': 'POST'} + ), ] connect_calls = [ @@ -212,33 +212,33 @@ def test_setup_routes( conditions={'method': 'POST'} ), mock.call( - 'migration_actions', - '/{project_id}/migrations/{id}/actions', - controller= - mock_migration_actions_create_resource.return_value, + 'transfer_actions', + '/{project_id}/transfers/{id}/actions', + controller=mock_transfer_actions_create_resource.return_value, action='action', conditions={'method': 'POST'} ), mock.call( - 'replica_actions', - '/{project_id}/replicas/{id}/actions', - controller=mock_replica_actions_create_resource.return_value, + 'transfer_tasks_execution_actions', + '/{project_id}/transfers/{transfer_id}/' + 'executions/{id}/actions', + controller= + mock_transfer_tasks_execution_actions_create_resource. + return_value, action='action', conditions={'method': 'POST'} ), mock.call( - 'replica_tasks_execution_actions', - '/{project_id}/replicas/{replica_id}/executions/{id}/actions', - controller= - mock_replica_tasks_execution_actions_create_resource. 
- return_value, + 'deployment_actions', '/{project_id}/deployments/{id}/actions', + controller=( + mock_deployment_actions_create_resource.return_value), action='action', - conditions={'method': 'POST'} + conditions={"method": "POST"} ), ] self.router._setup_routes(mapper, ext_mgr) mapper.redirect.assert_called_once_with("", "/") - mapper.resource.assert_has_calls(resource_calls) - mapper.connect.assert_has_calls(connect_calls) + mapper.resource.assert_has_calls(resource_calls, any_order=True) + mapper.connect.assert_has_calls(connect_calls, any_order=True) diff --git a/coriolis/tests/api/v1/test_replica_actions.py b/coriolis/tests/api/v1/test_transfer_actions.py similarity index 70% rename from coriolis/tests/api/v1/test_replica_actions.py rename to coriolis/tests/api/v1/test_transfer_actions.py index f4b3a8896..bbfc4aaae 100644 --- a/coriolis/tests/api/v1/test_replica_actions.py +++ b/coriolis/tests/api/v1/test_transfer_actions.py @@ -5,22 +5,22 @@ from webob import exc -from coriolis.api.v1 import replica_actions -from coriolis.api.v1.views import replica_tasks_execution_view +from coriolis.api.v1 import transfer_actions +from coriolis.api.v1.views import transfer_tasks_execution_view from coriolis import exception -from coriolis.replicas import api from coriolis.tests import test_base from coriolis.tests import testutils +from coriolis.transfers import api -class ReplicaActionsControllerTestCase(test_base.CoriolisBaseTestCase): - """Test suite for the Coriolis Replica Actions v1 API""" +class TransferActionsControllerTestCase(test_base.CoriolisBaseTestCase): + """Test suite for the Coriolis Transfer Actions v1 API""" def setUp(self): - super(ReplicaActionsControllerTestCase, self).setUp() - self.replica_actions = replica_actions.ReplicaActionsController() + super(TransferActionsControllerTestCase, self).setUp() + self.transfer_actions = transfer_actions.TransferActionsController() - @mock.patch.object(replica_tasks_execution_view, 'single') + 
@mock.patch.object(transfer_tasks_execution_view, 'single') @mock.patch.object(api.API, 'delete_disks') def test_delete_disks( self, @@ -34,7 +34,7 @@ def test_delete_disks( body = mock.sentinel.body result = testutils.get_wrapped_function( - self.replica_actions._delete_disks)( + self.transfer_actions._delete_disks)( mock_req, id, body @@ -46,11 +46,11 @@ def test_delete_disks( ) mock_context.can.assert_called_once_with( - "migration:replicas:delete_disks") + "migration:transfers:delete_disks") mock_delete_disks.assert_called_once_with(mock_context, id) mock_single.assert_called_once_with(mock_delete_disks.return_value) - @mock.patch.object(replica_tasks_execution_view, 'single') + @mock.patch.object(transfer_tasks_execution_view, 'single') @mock.patch.object(api.API, 'delete_disks') def test_delete_disks_not_found( self, @@ -66,18 +66,19 @@ def test_delete_disks_not_found( self.assertRaises( exc.HTTPNotFound, - testutils.get_wrapped_function(self.replica_actions._delete_disks), + testutils.get_wrapped_function( + self.transfer_actions._delete_disks), req=mock_req, id=id, body=body ) mock_context.can.assert_called_once_with( - "migration:replicas:delete_disks") + "migration:transfers:delete_disks") mock_delete_disks.assert_called_once_with(mock_context, id) mock_single.assert_not_called() - @mock.patch.object(replica_tasks_execution_view, 'single') + @mock.patch.object(transfer_tasks_execution_view, 'single') @mock.patch.object(api.API, 'delete_disks') def test_delete_disks_invalid_parameter_value( self, @@ -93,13 +94,14 @@ def test_delete_disks_invalid_parameter_value( self.assertRaises( exc.HTTPNotFound, - testutils.get_wrapped_function(self.replica_actions._delete_disks), + testutils.get_wrapped_function( + self.transfer_actions._delete_disks), req=mock_req, id=id, body=body ) mock_context.can.assert_called_once_with( - "migration:replicas:delete_disks") + "migration:transfers:delete_disks") mock_delete_disks.assert_called_once_with(mock_context, id) 
mock_single.assert_not_called() diff --git a/coriolis/tests/api/v1/test_replica_schedules.py b/coriolis/tests/api/v1/test_transfer_schedules.py similarity index 73% rename from coriolis/tests/api/v1/test_replica_schedules.py rename to coriolis/tests/api/v1/test_transfer_schedules.py index 27a8ac1da..d143f7daa 100644 --- a/coriolis/tests/api/v1/test_replica_schedules.py +++ b/coriolis/tests/api/v1/test_transfer_schedules.py @@ -7,22 +7,23 @@ import jsonschema from webob import exc -from coriolis.api.v1 import replica_schedules -from coriolis.api.v1.views import replica_schedule_view +from coriolis.api.v1 import transfer_schedules +from coriolis.api.v1.views import transfer_schedule_view from coriolis import exception -from coriolis.replica_cron import api from coriolis import schemas from coriolis.tests import test_base +from coriolis.transfer_cron import api -class ReplicaScheduleControllerTestCase(test_base.CoriolisBaseTestCase): - """Test suite for the Coriolis Replica Schedule v1 API""" +class TransferScheduleControllerTestCase(test_base.CoriolisBaseTestCase): + """Test suite for the Coriolis Transfer Schedule v1 API""" def setUp(self): - super(ReplicaScheduleControllerTestCase, self).setUp() - self.replica_schedules = replica_schedules.ReplicaScheduleController() + super(TransferScheduleControllerTestCase, self).setUp() + self.transfer_schedules = ( + transfer_schedules.TransferScheduleController()) - @mock.patch.object(replica_schedule_view, 'single') + @mock.patch.object(transfer_schedule_view, 'single') @mock.patch.object(api.API, 'get_schedule') def test_show( self, @@ -33,9 +34,9 @@ def test_show( mock_context = mock.Mock() mock_req.environ = {'coriolis.context': mock_context} id = mock.sentinel.id - replica_id = mock.sentinel.replica_id + transfer_id = mock.sentinel.transfer_id - result = self.replica_schedules.show(mock_req, replica_id, id) + result = self.transfer_schedules.show(mock_req, transfer_id, id) self.assertEqual( mock_single.return_value, @@ 
-43,11 +44,12 @@ def test_show( ) mock_context.can.assert_called_once_with( - "migration:replica_schedules:show") - mock_get_schedule.assert_called_once_with(mock_context, replica_id, id) + "migration:transfer_schedules:show") + mock_get_schedule.assert_called_once_with( + mock_context, transfer_id, id) mock_single.assert_called_once_with(mock_get_schedule.return_value) - @mock.patch.object(replica_schedule_view, 'single') + @mock.patch.object(transfer_schedule_view, 'single') @mock.patch.object(api.API, 'get_schedule') def test_show_not_found( self, @@ -58,23 +60,24 @@ def test_show_not_found( mock_context = mock.Mock() mock_req.environ = {'coriolis.context': mock_context} id = mock.sentinel.id - replica_id = mock.sentinel.replica_id + transfer_id = mock.sentinel.transfer_id mock_get_schedule.return_value = None self.assertRaises( exc.HTTPNotFound, - self.replica_schedules.show, + self.transfer_schedules.show, mock_req, - replica_id, + transfer_id, id ) mock_context.can.assert_called_once_with( - "migration:replica_schedules:show") - mock_get_schedule.assert_called_once_with(mock_context, replica_id, id) + "migration:transfer_schedules:show") + mock_get_schedule.assert_called_once_with( + mock_context, transfer_id, id) mock_single.assert_not_called() - @mock.patch.object(replica_schedule_view, 'collection') + @mock.patch.object(transfer_schedule_view, 'collection') @mock.patch.object(api.API, 'get_schedules') def test_index( self, @@ -84,20 +87,20 @@ def test_index( mock_req = mock.Mock() mock_context = mock.Mock() mock_req.environ = {'coriolis.context': mock_context} - replica_id = mock.sentinel.replica_id + transfer_id = mock.sentinel.transfer_id mock_req.GET = {"show_expired": "False"} - result = self.replica_schedules.index(mock_req, replica_id) + result = self.transfer_schedules.index(mock_req, transfer_id) self.assertEqual( mock_collection.return_value, result ) mock_context.can.assert_called_once_with( - "migration:replica_schedules:list") + 
"migration:transfer_schedules:list") mock_get_schedules.assert_called_once_with( mock_context, - replica_id, + transfer_id, expired=False ) mock_collection.assert_called_once_with( @@ -112,7 +115,7 @@ def test_validate_schedule( ): schedule = mock.sentinel.schedule - result = self.replica_schedules._validate_schedule(schedule) + result = self.transfer_schedules._validate_schedule(schedule) self.assertEqual( schedule, @@ -127,7 +130,7 @@ def test_validate_expiration_date_is_none( ): expiration_date = None - result = self.replica_schedules._validate_expiration_date( + result = self.transfer_schedules._validate_expiration_date( expiration_date) self.assertEqual( @@ -142,7 +145,7 @@ def test_validate_expiration_date_past( self.assertRaises( exception.InvalidInput, - self.replica_schedules._validate_expiration_date, + self.transfer_schedules._validate_expiration_date, expiration_date ) @@ -151,7 +154,7 @@ def test_validate_expiration_date( ): expiration_date = '9999-12-31' - result = self.replica_schedules._validate_expiration_date( + result = self.transfer_schedules._validate_expiration_date( expiration_date) self.assertEqual( @@ -159,11 +162,11 @@ def test_validate_expiration_date( result ) - @mock.patch.object(replica_schedules.ReplicaScheduleController, + @mock.patch.object(transfer_schedules.TransferScheduleController, '_validate_expiration_date') @mock.patch.object(schemas, 'validate_value') @mock.patch.object(jsonschema, 'FormatChecker') - @mock.patch.object(replica_schedules.ReplicaScheduleController, + @mock.patch.object(transfer_schedules.TransferScheduleController, '_validate_schedule') def test_validate_create_body( self, @@ -187,7 +190,7 @@ def test_validate_create_body( True ) - result = self.replica_schedules._validate_create_body(mock_body) + result = self.transfer_schedules._validate_create_body(mock_body) self.assertEqual( expected_result, @@ -201,11 +204,11 @@ def test_validate_create_body( ) mock_validate_expiration_date.assert_called_once_with(date) 
- @mock.patch.object(replica_schedules.ReplicaScheduleController, + @mock.patch.object(transfer_schedules.TransferScheduleController, '_validate_expiration_date') @mock.patch.object(schemas, 'validate_value') @mock.patch.object(jsonschema, 'FormatChecker') - @mock.patch.object(replica_schedules.ReplicaScheduleController, + @mock.patch.object(transfer_schedules.TransferScheduleController, '_validate_schedule') def test_validate_create_body_no_expiration_date( self, @@ -227,7 +230,7 @@ def test_validate_create_body_no_expiration_date( True ) - result = self.replica_schedules._validate_create_body(mock_body) + result = self.transfer_schedules._validate_create_body(mock_body) self.assertEqual( expected_result, @@ -249,15 +252,15 @@ def test_validate_create_body_no_schedule( self.assertRaises( exception.InvalidInput, - self.replica_schedules._validate_create_body, + self.transfer_schedules._validate_create_body, mock_body ) - @mock.patch.object(replica_schedules.ReplicaScheduleController, + @mock.patch.object(transfer_schedules.TransferScheduleController, '_validate_expiration_date') @mock.patch.object(schemas, 'validate_value') @mock.patch.object(jsonschema, 'FormatChecker') - @mock.patch.object(replica_schedules.ReplicaScheduleController, + @mock.patch.object(transfer_schedules.TransferScheduleController, '_validate_schedule') def test_validate_update_body( self, @@ -281,7 +284,8 @@ def test_validate_update_body( "shutdown_instance": True } - result = self.replica_schedules._validate_update_body(mock_update_body) + result = self.transfer_schedules._validate_update_body( + mock_update_body) self.assertEqual( expected_result, @@ -295,11 +299,11 @@ def test_validate_update_body( ) mock_validate_expiration_date.assert_called_once_with(date) - @mock.patch.object(replica_schedules.ReplicaScheduleController, + @mock.patch.object(transfer_schedules.TransferScheduleController, '_validate_expiration_date') @mock.patch.object(schemas, 'validate_value') 
@mock.patch.object(jsonschema, 'FormatChecker') - @mock.patch.object(replica_schedules.ReplicaScheduleController, + @mock.patch.object(transfer_schedules.TransferScheduleController, '_validate_schedule') def test_validate_update_body_none( self, @@ -311,7 +315,8 @@ def test_validate_update_body_none( mock_update_body = {} expected_result = {} - result = self.replica_schedules._validate_update_body(mock_update_body) + result = self.transfer_schedules._validate_update_body( + mock_update_body) self.assertEqual( expected_result, @@ -325,9 +330,9 @@ def test_validate_update_body_none( ) mock_validate_expiration_date.assert_not_called() - @mock.patch.object(replica_schedule_view, 'single') + @mock.patch.object(transfer_schedule_view, 'single') @mock.patch.object(api.API, 'create') - @mock.patch.object(replica_schedules.ReplicaScheduleController, + @mock.patch.object(transfer_schedules.TransferScheduleController, '_validate_create_body') def test_create( self, @@ -338,14 +343,14 @@ def test_create( mock_req = mock.Mock() mock_context = mock.Mock() mock_req.environ = {'coriolis.context': mock_context} - replica_id = mock.sentinel.replica_id + transfer_id = mock.sentinel.transfer_id body = mock.sentinel.body schedule = mock.sentinel.schedule exp_date = mock.sentinel.exp_date mock_validate_create_body.return_value = ( schedule, False, exp_date, True) - result = self.replica_schedules.create(mock_req, replica_id, body) + result = self.transfer_schedules.create(mock_req, transfer_id, body) self.assertEqual( mock_single.return_value, @@ -353,13 +358,13 @@ def test_create( ) mock_context.can.assert_called_once_with( - "migration:replica_schedules:create") + "migration:transfer_schedules:create") mock_validate_create_body.assert_called_once_with(body) mock_create.assert_called_once_with( - mock_context, replica_id, schedule, False, exp_date, True) + mock_context, transfer_id, schedule, False, exp_date, True) mock_single.assert_called_once_with(mock_create.return_value) - 
@mock.patch.object(replica_schedules.ReplicaScheduleController, + @mock.patch.object(transfer_schedules.TransferScheduleController, '_validate_create_body') def test_create_except( self, @@ -368,25 +373,25 @@ def test_create_except( mock_req = mock.Mock() mock_context = mock.Mock() mock_req.environ = {'coriolis.context': mock_context} - replica_id = mock.sentinel.replica_id + transfer_id = mock.sentinel.transfer_id body = mock.sentinel.body mock_validate_create_body.side_effect = Exception("err") self.assertRaises( exception.InvalidInput, - self.replica_schedules.create, + self.transfer_schedules.create, mock_req, - replica_id, + transfer_id, body ) mock_context.can.assert_called_once_with( - "migration:replica_schedules:create") + "migration:transfer_schedules:create") mock_validate_create_body.assert_called_once_with(body) - @mock.patch.object(replica_schedule_view, 'single') + @mock.patch.object(transfer_schedule_view, 'single') @mock.patch.object(api.API, 'update') - @mock.patch.object(replica_schedules.ReplicaScheduleController, + @mock.patch.object(transfer_schedules.TransferScheduleController, '_validate_update_body') def test_update( self, @@ -397,11 +402,12 @@ def test_update( mock_req = mock.Mock() mock_context = mock.Mock() mock_req.environ = {'coriolis.context': mock_context} - replica_id = mock.sentinel.replica_id + transfer_id = mock.sentinel.transfer_id id = mock.sentinel.id body = mock.sentinel.body - result = self.replica_schedules.update(mock_req, replica_id, id, body) + result = self.transfer_schedules.update( + mock_req, transfer_id, id, body) self.assertEqual( mock_single.return_value, @@ -409,14 +415,14 @@ def test_update( ) mock_context.can.assert_called_once_with( - "migration:replica_schedules:update") + "migration:transfer_schedules:update") mock_validate_update_body.assert_called_once_with(body) mock_update.assert_called_once_with( - mock_context, replica_id, id, + mock_context, transfer_id, id, mock_validate_update_body.return_value) 
mock_single.assert_called_once_with(mock_update.return_value) - @mock.patch.object(replica_schedules.ReplicaScheduleController, + @mock.patch.object(transfer_schedules.TransferScheduleController, '_validate_update_body') def test_update_except( self, @@ -425,22 +431,22 @@ def test_update_except( mock_req = mock.Mock() mock_context = mock.Mock() mock_req.environ = {'coriolis.context': mock_context} - replica_id = mock.sentinel.replica_id + transfer_id = mock.sentinel.transfer_id id = mock.sentinel.id body = mock.sentinel.body mock_validate_update_body.side_effect = Exception("err") self.assertRaises( exception.InvalidInput, - self.replica_schedules.update, + self.transfer_schedules.update, mock_req, - replica_id, + transfer_id, id, body ) mock_context.can.assert_called_once_with( - "migration:replica_schedules:update") + "migration:transfer_schedules:update") mock_validate_update_body.assert_called_once_with(body) @mock.patch.object(api.API, 'delete') @@ -451,17 +457,17 @@ def test_delete( mock_req = mock.Mock() mock_context = mock.Mock() mock_req.environ = {'coriolis.context': mock_context} - replica_id = mock.sentinel.replica_id + transfer_id = mock.sentinel.transfer_id id = mock.sentinel.id self.assertRaises( exc.HTTPNoContent, - self.replica_schedules.delete, + self.transfer_schedules.delete, mock_req, - replica_id, + transfer_id, id ) mock_context.can.assert_called_once_with( - "migration:replica_schedules:delete") - mock_delete.assert_called_once_with(mock_context, replica_id, id) + "migration:transfer_schedules:delete") + mock_delete.assert_called_once_with(mock_context, transfer_id, id) diff --git a/coriolis/tests/api/v1/test_replica_tasks_execution_actions.py b/coriolis/tests/api/v1/test_transfer_tasks_execution_actions.py similarity index 57% rename from coriolis/tests/api/v1/test_replica_tasks_execution_actions.py rename to coriolis/tests/api/v1/test_transfer_tasks_execution_actions.py index 3e46b7254..bddfb363c 100644 --- 
a/coriolis/tests/api/v1/test_replica_tasks_execution_actions.py +++ b/coriolis/tests/api/v1/test_transfer_tasks_execution_actions.py @@ -6,25 +6,26 @@ import ddt from webob import exc -from coriolis.api.v1 import replica_tasks_execution_actions as replica_api +from coriolis.api.v1 import transfer_tasks_execution_actions as transfer_api from coriolis import exception -from coriolis.replica_tasks_executions import api from coriolis.tests import test_base from coriolis.tests import testutils +from coriolis.transfer_tasks_executions import api @ddt.ddt -class ReplicaTasksExecutionActionsControllerTestCase( +class TransferTasksExecutionActionsControllerTestCase( test_base.CoriolisBaseTestCase ): - """Test suite for the Coriolis Replica Tasks Execution Actions v1 API""" + """Test suite for the Coriolis Transfer Tasks Execution Actions v1 API""" def setUp(self): - super(ReplicaTasksExecutionActionsControllerTestCase, self).setUp() - self.replica_api = replica_api.ReplicaTasksExecutionActionsController() + super(TransferTasksExecutionActionsControllerTestCase, self).setUp() + self.transfer_api = ( + transfer_api.TransferTasksExecutionActionsController()) @mock.patch.object(api.API, 'cancel') - @ddt.file_data('data/replica_task_execution_actions_cancel.yml') + @ddt.file_data('data/transfer_task_execution_actions_cancel.yml') def test_cancel( self, mock_cancel, @@ -37,7 +38,7 @@ def test_cancel( mock_context = mock.Mock() mock_req.environ = {'coriolis.context': mock_context} id = mock.sentinel.id - replica_id = mock.sentinel.replica_id + transfer_id = mock.sentinel.transfer_id body = config["body"] if exception_raised: mock_cancel.side_effect = getattr(exception, exception_raised)( @@ -45,14 +46,14 @@ def test_cancel( self.assertRaises( getattr(exc, expected_result), - testutils.get_wrapped_function(self.replica_api._cancel), + testutils.get_wrapped_function(self.transfer_api._cancel), mock_req, - replica_id, + transfer_id, id, body ) 
mock_context.can.assert_called_once_with( - "migration:replica_executions:cancel") + "migration:transfer_executions:cancel") mock_cancel.assert_called_once_with( - mock_context, replica_id, id, expected_force) + mock_context, transfer_id, id, expected_force) diff --git a/coriolis/tests/api/v1/test_replica_tasks_executions.py b/coriolis/tests/api/v1/test_transfer_tasks_executions.py similarity index 63% rename from coriolis/tests/api/v1/test_replica_tasks_executions.py rename to coriolis/tests/api/v1/test_transfer_tasks_executions.py index fce66069f..de607b2e5 100644 --- a/coriolis/tests/api/v1/test_replica_tasks_executions.py +++ b/coriolis/tests/api/v1/test_transfer_tasks_executions.py @@ -5,21 +5,21 @@ from webob import exc -from coriolis.api.v1 import replica_tasks_executions as replica_api -from coriolis.api.v1.views import replica_tasks_execution_view +from coriolis.api.v1 import transfer_tasks_executions as transfer_api +from coriolis.api.v1.views import transfer_tasks_execution_view from coriolis import exception -from coriolis.replica_tasks_executions import api from coriolis.tests import test_base +from coriolis.transfer_tasks_executions import api -class ReplicaTasksExecutionControllerTestCase(test_base.CoriolisBaseTestCase): - """Test suite for the Coriolis Replica Tasks Execution v1 API""" +class TransferTasksExecutionControllerTestCase(test_base.CoriolisBaseTestCase): + """Test suite for the Coriolis Transfer Tasks Execution v1 API""" def setUp(self): - super(ReplicaTasksExecutionControllerTestCase, self).setUp() - self.replica_api = replica_api.ReplicaTasksExecutionController() + super(TransferTasksExecutionControllerTestCase, self).setUp() + self.transfer_api = transfer_api.TransferTasksExecutionController() - @mock.patch.object(replica_tasks_execution_view, 'single') + @mock.patch.object(transfer_tasks_execution_view, 'single') @mock.patch.object(api.API, 'get_execution') def test_show( self, @@ -29,10 +29,10 @@ def test_show( mock_req = mock.Mock() 
mock_context = mock.Mock() mock_req.environ = {'coriolis.context': mock_context} - replica_id = mock.sentinel.replica_id + transfer_id = mock.sentinel.transfer_id id = mock.sentinel.id - result = self.replica_api.show(mock_req, replica_id, id) + result = self.transfer_api.show(mock_req, transfer_id, id) self.assertEqual( mock_single.return_value, @@ -40,12 +40,12 @@ def test_show( ) mock_context.can.assert_called_once_with( - "migration:replica_executions:show") + "migration:transfer_executions:show") mock_get_execution.assert_called_once_with( - mock_context, replica_id, id) + mock_context, transfer_id, id) mock_single.assert_called_once_with(mock_get_execution.return_value) - @mock.patch.object(replica_tasks_execution_view, 'single') + @mock.patch.object(transfer_tasks_execution_view, 'single') @mock.patch.object(api.API, 'get_execution') def test_show_not_found( self, @@ -55,25 +55,25 @@ def test_show_not_found( mock_req = mock.Mock() mock_context = mock.Mock() mock_req.environ = {'coriolis.context': mock_context} - replica_id = mock.sentinel.replica_id + transfer_id = mock.sentinel.transfer_id id = mock.sentinel.id mock_get_execution.return_value = None self.assertRaises( exc.HTTPNotFound, - self.replica_api.show, + self.transfer_api.show, mock_req, - replica_id, + transfer_id, id ) mock_context.can.assert_called_once_with( - "migration:replica_executions:show") + "migration:transfer_executions:show") mock_get_execution.assert_called_once_with( - mock_context, replica_id, id) + mock_context, transfer_id, id) mock_single.assert_not_called() - @mock.patch.object(replica_tasks_execution_view, 'collection') + @mock.patch.object(transfer_tasks_execution_view, 'collection') @mock.patch.object(api.API, 'get_executions') def test_index( self, @@ -83,9 +83,9 @@ def test_index( mock_req = mock.Mock() mock_context = mock.Mock() mock_req.environ = {'coriolis.context': mock_context} - replica_id = mock.sentinel.replica_id + transfer_id = mock.sentinel.transfer_id - result = 
self.replica_api.index(mock_req, replica_id) + result = self.transfer_api.index(mock_req, transfer_id) self.assertEqual( mock_collection.return_value, @@ -93,13 +93,13 @@ def test_index( ) mock_context.can.assert_called_once_with( - "migration:replica_executions:list") + "migration:transfer_executions:list") mock_get_executions.assert_called_once_with( - mock_context, replica_id, include_tasks=False) + mock_context, transfer_id, include_tasks=False) mock_collection.assert_called_once_with( mock_get_executions.return_value) - @mock.patch.object(replica_tasks_execution_view, 'collection') + @mock.patch.object(transfer_tasks_execution_view, 'collection') @mock.patch.object(api.API, 'get_executions') def test_detail( self, @@ -109,9 +109,9 @@ def test_detail( mock_req = mock.Mock() mock_context = mock.Mock() mock_req.environ = {'coriolis.context': mock_context} - replica_id = mock.sentinel.replica_id + transfer_id = mock.sentinel.transfer_id - result = self.replica_api.detail(mock_req, replica_id) + result = self.transfer_api.detail(mock_req, transfer_id) self.assertEqual( mock_collection.return_value, @@ -119,13 +119,13 @@ def test_detail( ) mock_context.can.assert_called_once_with( - "migration:replica_executions:show") + "migration:transfer_executions:show") mock_get_executions.assert_called_once_with( - mock_context, replica_id, include_tasks=True) + mock_context, transfer_id, include_tasks=True) mock_collection.assert_called_once_with( mock_get_executions.return_value) - @mock.patch.object(replica_tasks_execution_view, 'single') + @mock.patch.object(transfer_tasks_execution_view, 'single') @mock.patch.object(api.API, 'create') def test_create( self, @@ -135,11 +135,11 @@ def test_create( mock_req = mock.Mock() mock_context = mock.Mock() mock_req.environ = {'coriolis.context': mock_context} - replica_id = mock.sentinel.replica_id + transfer_id = mock.sentinel.transfer_id execution = {"shutdown_instances": True} mock_body = {"execution": execution} - result = 
self.replica_api.create(mock_req, replica_id, mock_body) + result = self.transfer_api.create(mock_req, transfer_id, mock_body) self.assertEqual( mock_single.return_value, @@ -147,12 +147,12 @@ def test_create( ) mock_context.can.assert_called_once_with( - "migration:replica_executions:create") + "migration:transfer_executions:create") mock_create.assert_called_once_with( - mock_context, replica_id, True) + mock_context, transfer_id, True) mock_single.assert_called_once_with(mock_create.return_value) - @mock.patch.object(replica_tasks_execution_view, 'single') + @mock.patch.object(transfer_tasks_execution_view, 'single') @mock.patch.object(api.API, 'create') def test_create_no_executions( self, @@ -162,10 +162,10 @@ def test_create_no_executions( mock_req = mock.Mock() mock_context = mock.Mock() mock_req.environ = {'coriolis.context': mock_context} - replica_id = mock.sentinel.replica_id + transfer_id = mock.sentinel.transfer_id mock_body = {} - result = self.replica_api.create(mock_req, replica_id, mock_body) + result = self.transfer_api.create(mock_req, transfer_id, mock_body) self.assertEqual( mock_single.return_value, @@ -173,9 +173,9 @@ def test_create_no_executions( ) mock_context.can.assert_called_once_with( - "migration:replica_executions:create") + "migration:transfer_executions:create") mock_create.assert_called_once_with( - mock_context, replica_id, False) + mock_context, transfer_id, False) mock_single.assert_called_once_with(mock_create.return_value) @mock.patch.object(api.API, 'delete') @@ -186,20 +186,20 @@ def test_delete( mock_req = mock.Mock() mock_context = mock.Mock() mock_req.environ = {'coriolis.context': mock_context} - replica_id = mock.sentinel.replica_id + transfer_id = mock.sentinel.transfer_id id = mock.sentinel.id self.assertRaises( exc.HTTPNoContent, - self.replica_api.delete, + self.transfer_api.delete, mock_req, - replica_id, + transfer_id, id ) mock_context.can.assert_called_once_with( - "migration:replica_executions:delete") - 
mock_delete.assert_called_once_with(mock_context, replica_id, id) + "migration:transfer_executions:delete") + mock_delete.assert_called_once_with(mock_context, transfer_id, id) @mock.patch.object(api.API, 'delete') def test_delete_not_found( @@ -209,18 +209,18 @@ def test_delete_not_found( mock_req = mock.Mock() mock_context = mock.Mock() mock_req.environ = {'coriolis.context': mock_context} - replica_id = mock.sentinel.replica_id + transfer_id = mock.sentinel.transfer_id id = mock.sentinel.id mock_delete.side_effect = exception.NotFound() self.assertRaises( exc.HTTPNotFound, - self.replica_api.delete, + self.transfer_api.delete, mock_req, - replica_id, + transfer_id, id ) mock_context.can.assert_called_once_with( - "migration:replica_executions:delete") - mock_delete.assert_called_once_with(mock_context, replica_id, id) + "migration:transfer_executions:delete") + mock_delete.assert_called_once_with(mock_context, transfer_id, id) diff --git a/coriolis/tests/api/v1/test_replicas.py b/coriolis/tests/api/v1/test_transfers.py similarity index 67% rename from coriolis/tests/api/v1/test_replicas.py rename to coriolis/tests/api/v1/test_transfers.py index ff1cdecc5..f2fa60812 100644 --- a/coriolis/tests/api/v1/test_replicas.py +++ b/coriolis/tests/api/v1/test_transfers.py @@ -6,31 +6,31 @@ import ddt from webob import exc -from coriolis.api.v1 import replicas +from coriolis.api.v1 import transfers from coriolis.api.v1 import utils as api_utils -from coriolis.api.v1.views import replica_tasks_execution_view -from coriolis.api.v1.views import replica_view +from coriolis.api.v1.views import transfer_tasks_execution_view +from coriolis.api.v1.views import transfer_view from coriolis.endpoints import api as endpoints_api from coriolis import exception -from coriolis.replicas import api from coriolis.tests import test_base from coriolis.tests import testutils +from coriolis.transfers import api @ddt.ddt -class ReplicaControllerTestCase(test_base.CoriolisBaseTestCase): - """Test 
suite for the Coriolis Replica Controller v1 API""" +class TransferControllerTestCase(test_base.CoriolisBaseTestCase): + """Test suite for the Coriolis Transfer Controller v1 API""" def setUp(self): - super(ReplicaControllerTestCase, self).setUp() - self.replicas = replicas.ReplicaController() + super(TransferControllerTestCase, self).setUp() + self.transfers = transfers.TransferController() - @mock.patch('coriolis.api.v1.replicas.CONF') - @mock.patch.object(replica_view, 'single') - @mock.patch.object(api.API, 'get_replica') + @mock.patch('coriolis.api.v1.transfers.CONF') + @mock.patch.object(transfer_view, 'single') + @mock.patch.object(api.API, 'get_transfer') def test_show( self, - mock_get_replica, + mock_get_transfer, mock_single, mock_conf ): @@ -38,26 +38,26 @@ def test_show( mock_context = mock.Mock() mock_req.environ = {'coriolis.context': mock_context} id = mock.sentinel.id - mock_conf.api.include_task_info_in_replicas_api = True + mock_conf.api.include_task_info_in_transfers_api = True - result = self.replicas.show(mock_req, id) + result = self.transfers.show(mock_req, id) self.assertEqual( mock_single.return_value, result ) - mock_context.can.assert_called_once_with("migration:replicas:show") - mock_get_replica.assert_called_once_with( + mock_context.can.assert_called_once_with("migration:transfers:show") + mock_get_transfer.assert_called_once_with( mock_context, id, include_task_info=True) - mock_single.assert_called_once_with(mock_get_replica.return_value) + mock_single.assert_called_once_with(mock_get_transfer.return_value) - @mock.patch('coriolis.api.v1.replicas.CONF') - @mock.patch.object(replica_view, 'single') - @mock.patch.object(api.API, 'get_replica') - def test_show_no_replica( + @mock.patch('coriolis.api.v1.transfers.CONF') + @mock.patch.object(transfer_view, 'single') + @mock.patch.object(api.API, 'get_transfer') + def test_show_no_transfer( self, - mock_get_replica, + mock_get_transfer, mock_single, mock_conf ): @@ -65,29 +65,29 @@ def 
test_show_no_replica( mock_context = mock.Mock() mock_req.environ = {'coriolis.context': mock_context} id = mock.sentinel.id - mock_conf.api.include_task_info_in_replicas_api = True - mock_get_replica.return_value = None + mock_conf.api.include_task_info_in_transfers_api = True + mock_get_transfer.return_value = None self.assertRaises( exc.HTTPNotFound, - self.replicas.show, + self.transfers.show, mock_req, id ) - mock_context.can.assert_called_once_with("migration:replicas:show") - mock_get_replica.assert_called_once_with( + mock_context.can.assert_called_once_with("migration:transfers:show") + mock_get_transfer.assert_called_once_with( mock_context, id, include_task_info=True) mock_single.assert_not_called() - @mock.patch('coriolis.api.v1.replicas.CONF') - @mock.patch.object(replica_view, 'collection') - @mock.patch.object(api.API, 'get_replicas') + @mock.patch('coriolis.api.v1.transfers.CONF') + @mock.patch.object(transfer_view, 'collection') + @mock.patch.object(api.API, 'get_transfers') @mock.patch.object(api_utils, '_get_show_deleted') def test_list( self, mock_get_show_deleted, - mock_get_replicas, + mock_get_transfers, mock_collection, mock_conf ): @@ -95,7 +95,7 @@ def test_list( mock_context = mock.Mock() mock_req.environ = {'coriolis.context': mock_context} - result = self.replicas._list(mock_req) + result = self.transfers._list(mock_req) self.assertEqual( mock_collection.return_value, @@ -104,14 +104,15 @@ def test_list( mock_get_show_deleted.assert_called_once_with( mock_req.GET.get.return_value) - mock_context.can.assert_called_once_with("migration:replicas:list") - mock_get_replicas.assert_called_once_with( + mock_context.can.assert_called_once_with("migration:transfers:list") + mock_get_transfers.assert_called_once_with( mock_context, include_tasks_executions= - mock_conf.api.include_task_info_in_replicas_api, - include_task_info=mock_conf.api.include_task_info_in_replicas_api + mock_conf.api.include_task_info_in_transfers_api, + 
include_task_info=mock_conf.api.include_task_info_in_transfers_api ) - mock_collection.assert_called_once_with(mock_get_replicas.return_value) + mock_collection.assert_called_once_with( + mock_get_transfers.return_value) @mock.patch.object(api_utils, 'validate_instances_list_for_transfer') @mock.patch.object(endpoints_api.API, 'validate_source_environment') @@ -120,7 +121,7 @@ def test_list( @mock.patch.object(api_utils, 'validate_user_scripts') @mock.patch.object(api_utils, 'normalize_user_scripts') @mock.patch.object(api_utils, 'validate_storage_mappings') - @ddt.file_data('data/replicas_validate_create_body.yml') + @ddt.file_data('data/transfers_validate_create_body.yml') def test_validate_create_body( self, mock_validate_storage_mappings, @@ -136,15 +137,15 @@ def test_validate_create_body( ): ctxt = {} body = config["body"] - replica = body["replica"] - origin_endpoint_id = replica.get('origin_endpoint_id') - source_environment = replica.get('source_environment') - network_map = replica.get('network_map') - destination_endpoint_id = replica.get('destination_endpoint_id') - destination_environment = replica.get('destination_environment') - user_scripts = replica.get('user_scripts') - instances = replica.get('instances') - storage_mappings = replica.get('storage_mappings') + transfer = body["transfer"] + origin_endpoint_id = transfer.get('origin_endpoint_id') + source_environment = transfer.get('source_environment') + network_map = transfer.get('network_map') + destination_endpoint_id = transfer.get('destination_endpoint_id') + destination_environment = transfer.get('destination_environment') + user_scripts = transfer.get('user_scripts') + instances = transfer.get('instances') + storage_mappings = transfer.get('storage_mappings') mock_validate_instances_list_for_transfer.return_value = instances mock_normalize_user_scripts.return_value = user_scripts @@ -153,8 +154,8 @@ def test_validate_create_body( Exception, exception_raised, testutils.get_wrapped_function( - 
self.replicas._validate_create_body), - self.replicas, + self.transfers._validate_create_body), + self.transfers, ctxt, body ) @@ -162,8 +163,8 @@ def test_validate_create_body( mock_validate_network_map.assert_not_called() else: result = testutils.get_wrapped_function( - self.replicas._validate_create_body)( - self.replicas, + self.transfers._validate_create_body)( + self.transfers, ctxt, body, ) @@ -187,9 +188,9 @@ def test_validate_create_body( mock_validate_instances_list_for_transfer.assert_called_once_with( instances) - @mock.patch.object(replica_view, 'single') + @mock.patch.object(transfer_view, 'single') @mock.patch.object(api.API, 'create') - @mock.patch.object(replicas.ReplicaController, '_validate_create_body') + @mock.patch.object(transfers.TransferController, '_validate_create_body') def test_create( self, mock_validate_create_body, @@ -200,9 +201,9 @@ def test_create( mock_context = mock.Mock() mock_req.environ = {'coriolis.context': mock_context} mock_body = {} - mock_validate_create_body.return_value = (mock.sentinel.value,) * 12 + mock_validate_create_body.return_value = (mock.sentinel.value,) * 13 - result = self.replicas.create(mock_req, mock_body) + result = self.transfers.create(mock_req, mock_body) self.assertEqual( mock_single.return_value, @@ -210,7 +211,7 @@ def test_create( ) mock_context.can.assert_called_once_with( - "migration:replicas:create") + "migration:transfers:create") mock_validate_create_body.assert_called_once_with( mock_context, mock_body) mock_create.assert_called_once() @@ -228,7 +229,7 @@ def test_delete( self.assertRaises( exc.HTTPNoContent, - self.replicas.delete, + self.transfers.delete, mock_req, id ) @@ -248,15 +249,15 @@ def test_delete_not_found( self.assertRaises( exc.HTTPNotFound, - self.replicas.delete, + self.transfers.delete, mock_req, id ) - mock_context.can.assert_called_once_with("migration:replicas:delete") + mock_context.can.assert_called_once_with("migration:transfers:delete") 
mock_delete.assert_called_once_with(mock_context, id) - @ddt.file_data('data/replicas_update_storage_mappings.yml') + @ddt.file_data('data/transfers_update_storage_mappings.yml') def test_update_storage_mappings( self, config, @@ -267,11 +268,11 @@ def test_update_storage_mappings( new_storage_mappings = config['new_storage_mappings'] if logs_expected: - with self.assertLogs('coriolis.api.v1.replicas', level='INFO'): - result = self.replicas._update_storage_mappings( + with self.assertLogs('coriolis.api.v1.transfers', level='INFO'): + result = self.transfers._update_storage_mappings( original_storage_mappings, new_storage_mappings) else: - result = self.replicas._update_storage_mappings( + result = self.transfers._update_storage_mappings( original_storage_mappings, new_storage_mappings) self.assertEqual( @@ -296,7 +297,7 @@ def test_get_updated_user_scripts( "mock_global_scripts_2": "mock_value"}, 'instances': {"mock_instance_scripts": "mock_new_value"} } - result = self.replicas._get_updated_user_scripts( + result = self.transfers._get_updated_user_scripts( original_user_scripts, new_user_scripts) self.assertEqual( @@ -314,7 +315,7 @@ def test_get_updated_user_scripts_new_user_scripts_empty( } new_user_scripts = {} - result = self.replicas._get_updated_user_scripts( + result = self.transfers._get_updated_user_scripts( original_user_scripts, new_user_scripts) self.assertEqual( @@ -322,11 +323,13 @@ def test_get_updated_user_scripts_new_user_scripts_empty( result ) - @mock.patch.object(replicas.ReplicaController, '_get_updated_user_scripts') + @mock.patch.object(transfers.TransferController, + '_get_updated_user_scripts') @mock.patch.object(api_utils, 'validate_user_scripts') - @mock.patch.object(replicas.ReplicaController, '_update_storage_mappings') - @ddt.file_data('data/replicas_get_merged_replica_values.yml') - def test_get_merged_replica_values( + @mock.patch.object(transfers.TransferController, + '_update_storage_mappings') + 
@ddt.file_data('data/transfers_get_merged_transfer_values.yml') + def test_get_merged_transfer_values( self, mock_update_storage_mappings, mock_validate_user_scripts, @@ -334,10 +337,10 @@ def test_get_merged_replica_values( config, expected_result ): - replica = config['replica'] + transfer = config['transfer'] updated_values = config['updated_values'] - original_storage_mapping = replica.get('storage_mappings', {}) - replica_user_scripts = replica.get('user_scripts', {}) + original_storage_mapping = transfer.get('storage_mappings', {}) + transfer_user_scripts = transfer.get('user_scripts', {}) updated_user_scripts = updated_values.get('user_scripts', {}) new_storage_mappings = updated_values.get('storage_mappings', {}) expected_result['storage_mappings'] = \ @@ -349,8 +352,8 @@ def test_get_merged_replica_values( mock_validate_user_scripts.side_effect = ["mock_scripts", "mock_new_scripts"] - result = self.replicas._get_merged_replica_values( - replica, updated_values) + result = self.transfers._get_merged_transfer_values( + transfer, updated_values) self.assertEqual( expected_result, @@ -360,7 +363,8 @@ def test_get_merged_replica_values( mock_update_storage_mappings.assert_called_once_with( original_storage_mapping, new_storage_mappings) mock_validate_user_scripts.assert_has_calls( - [mock.call(replica_user_scripts), mock.call(updated_user_scripts)]) + [mock.call(transfer_user_scripts), + mock.call(updated_user_scripts)]) mock_get_updated_user_scripts.assert_called_once_with( "mock_scripts", "mock_new_scripts") @@ -370,14 +374,14 @@ def test_get_merged_replica_values( @mock.patch.object(api_utils, 'validate_network_map') @mock.patch.object(endpoints_api.API, 'validate_target_environment') @mock.patch.object(endpoints_api.API, 'validate_source_environment') - @mock.patch.object(replicas.ReplicaController, - '_get_merged_replica_values') - @mock.patch.object(api.API, 'get_replica') - @ddt.file_data('data/replicas_validate_update_body.yml') + 
@mock.patch.object(transfers.TransferController, + '_get_merged_transfer_values') + @mock.patch.object(api.API, 'get_transfer') + @ddt.file_data('data/transfers_validate_update_body.yml') def test_validate_update_body( self, - mock_get_replica, - mock_get_merged_replica_values, + mock_get_transfer, + mock_get_merged_transfer_values, mock_validate_source_environment, mock_validate_target_environment, mock_validate_network_map, @@ -388,17 +392,18 @@ def test_validate_update_body( expected_result ): body = config['body'] - replica = config['replica'] - replica_body = body['replica'] + transfer = config['transfer'] + transfer_body = body['transfer'] context = mock.sentinel.context id = mock.sentinel.id - mock_get_replica.return_value = replica - mock_get_merged_replica_values.return_value = replica_body - mock_normalize_user_scripts.return_value = replica_body['user_scripts'] + mock_get_transfer.return_value = transfer + mock_get_merged_transfer_values.return_value = transfer_body + mock_normalize_user_scripts.return_value = transfer_body[ + 'user_scripts'] result = testutils.get_wrapped_function( - self.replicas._validate_update_body)( - self.replicas, + self.transfers._validate_update_body)( + self.transfers, id, context, body @@ -409,29 +414,29 @@ def test_validate_update_body( result ) - mock_get_replica.assert_called_once_with(context, id) - mock_get_merged_replica_values.assert_called_once_with( - replica, replica_body) + mock_get_transfer.assert_called_once_with(context, id) + mock_get_merged_transfer_values.assert_called_once_with( + transfer, transfer_body) mock_validate_source_environment.assert_called_once_with( - context, replica['origin_endpoint_id'], - replica_body['source_environment']) + context, transfer['origin_endpoint_id'], + transfer_body['source_environment']) mock_validate_target_environment.assert_called_once_with( - context, replica['destination_endpoint_id'], - replica_body['destination_environment']) + context, 
transfer['destination_endpoint_id'], + transfer_body['destination_environment']) mock_validate_network_map.assert_called_once_with( - replica_body['network_map']) + transfer_body['network_map']) mock_validate_storage_mappings.assert_called_once_with( - replica_body['storage_mappings']) + transfer_body['storage_mappings']) mock_validate_user_scripts.assert_called_once_with( - replica_body['user_scripts']) + transfer_body['user_scripts']) mock_normalize_user_scripts.assert_called_once_with( - replica_body['user_scripts'], replica['instances']) + transfer_body['user_scripts'], transfer['instances']) - @mock.patch.object(api.API, 'get_replica') - @ddt.file_data('data/replicas_validate_update_body_raises.yml') + @mock.patch.object(api.API, 'get_transfer') + @ddt.file_data('data/transfers_validate_update_body_raises.yml') def test_validate_update_body_raises( self, - mock_get_replica, + mock_get_transfer, body, ): context = mock.sentinel.context @@ -440,18 +445,18 @@ def test_validate_update_body_raises( self.assertRaises( exc.HTTPBadRequest, testutils.get_wrapped_function( - self.replicas._validate_update_body), - self.replicas, + self.transfers._validate_update_body), + self.transfers, id, context, body ) - mock_get_replica.assert_called_once_with(context, id) + mock_get_transfer.assert_called_once_with(context, id) - @mock.patch.object(replica_tasks_execution_view, 'single') + @mock.patch.object(transfer_tasks_execution_view, 'single') @mock.patch.object(api.API, 'update') - @mock.patch.object(replicas.ReplicaController, '_validate_update_body') + @mock.patch.object(transfers.TransferController, '_validate_update_body') def test_update( self, mock_validate_update_body, @@ -464,7 +469,7 @@ def test_update( id = mock.sentinel.id body = mock.sentinel.body - result = self.replicas.update(mock_req, id, body) + result = self.transfers.update(mock_req, id, body) self.assertEqual( mock_single.return_value, @@ -472,7 +477,7 @@ def test_update( ) 
mock_context.can.assert_called_once_with( - "migration:replicas:update") + "migration:transfers:update") mock_validate_update_body.assert_called_once_with( id, mock_context, body) mock_update.assert_called_once_with( @@ -481,7 +486,7 @@ def test_update( mock_single.assert_called_once_with(mock_update.return_value) @mock.patch.object(api.API, 'update') - @mock.patch.object(replicas.ReplicaController, '_validate_update_body') + @mock.patch.object(transfers.TransferController, '_validate_update_body') def test_update_not_found( self, mock_validate_update_body, @@ -496,14 +501,14 @@ def test_update_not_found( self.assertRaises( exc.HTTPNotFound, - self.replicas.update, + self.transfers.update, mock_req, id, body ) mock_context.can.assert_called_once_with( - "migration:replicas:update") + "migration:transfers:update") mock_validate_update_body.assert_called_once_with( id, mock_context, body) mock_update.assert_called_once_with( @@ -511,7 +516,7 @@ def test_update_not_found( mock_validate_update_body.return_value) @mock.patch.object(api.API, 'update') - @mock.patch.object(replicas.ReplicaController, '_validate_update_body') + @mock.patch.object(transfers.TransferController, '_validate_update_body') def test_update_not_invalid_parameter_value( self, mock_validate_update_body, @@ -526,14 +531,14 @@ def test_update_not_invalid_parameter_value( self.assertRaises( exc.HTTPNotFound, - self.replicas.update, + self.transfers.update, mock_req, id, body ) mock_context.can.assert_called_once_with( - "migration:replicas:update") + "migration:transfers:update") mock_validate_update_body.assert_called_once_with( id, mock_context, body) mock_update.assert_called_once_with( diff --git a/coriolis/tests/api/v1/views/__init__py b/coriolis/tests/api/v1/views/__init__py deleted file mode 100644 index e69de29bb..000000000 diff --git a/coriolis/tests/api/v1/views/test_migration_view.py b/coriolis/tests/api/v1/views/test_migration_view.py deleted file mode 100644 index a803aac45..000000000 --- 
a/coriolis/tests/api/v1/views/test_migration_view.py +++ /dev/null @@ -1,96 +0,0 @@ -# Copyright 2023 Cloudbase Solutions Srl -# All Rights Reserved. - -from unittest import mock - -from coriolis.api.v1.views import migration_view -from coriolis.api.v1.views import replica_tasks_execution_view as view -from coriolis.api.v1.views import utils as view_utils -from coriolis.tests import test_base - - -class MigrationViewTestCase(test_base.CoriolisApiViewsTestCase): - """Test suite for the Coriolis api v1 views.""" - - @mock.patch.object(view, 'format_replica_tasks_execution') - @mock.patch.object(view_utils, 'format_opt') - def test_format_migration( - self, - mock_format_opt, - mock_format_replica_tasks_execution - ): - mock_execution = {'tasks': 'mock_id1'} - mock_format_opt.return_value = { - "executions": [mock_execution], - 'tasks': 'mock_id2', - 'mock_key': 'mock_value' - } - mock_format_replica_tasks_execution.return_value = mock_execution - - expected_result = { - 'tasks': 'mock_id1', - 'mock_key': 'mock_value' - } - - endpoint = mock.sentinel.endpoint - keys = mock.sentinel.keys - result = migration_view._format_migration(endpoint, keys) - - mock_format_replica_tasks_execution.assert_called_once_with( - mock_execution, keys - ) - mock_format_opt.assert_called_once_with(endpoint, keys) - - self.assertEqual( - expected_result, - result - ) - - @mock.patch.object(view_utils, 'format_opt') - def test_format_migration_no_tasks( - self, - mock_format_opt, - ): - mock_format_opt.return_value = { - 'mock_key': 'mock_value' - } - - endpoint = mock.sentinel.endpoint - keys = mock.sentinel.keys - result = migration_view._format_migration(endpoint, keys) - - mock_format_opt.assert_called_once_with(endpoint, keys) - - self.assertEqual( - mock_format_opt.return_value, - result - ) - - @mock.patch.object(view_utils, 'format_opt') - def test_format_migration_migration_dict_has_tasks( - self, - mock_format_opt, - ): - mock_format_opt.return_value = { - 'tasks': 'mock_id1', - 
'mock_key': 'mock_value' - } - - endpoint = mock.sentinel.endpoint - keys = mock.sentinel.keys - result = migration_view._format_migration(endpoint, keys) - - mock_format_opt.assert_called_once_with(endpoint, keys) - - self.assertEqual( - mock_format_opt.return_value, - result - ) - - def test_single(self): - fun = getattr(migration_view, 'single') - self._single_view_test(fun, 'migration') - - def test_collection(self): - fun = getattr(migration_view, 'collection') - self._collection_view_test(fun, 'migrations') diff --git a/coriolis/tests/api/v1/views/test_replica_view.py b/coriolis/tests/api/v1/views/test_replica_view.py deleted file mode 100644 index ec5ffe3e5..000000000 --- a/coriolis/tests/api/v1/views/test_replica_view.py +++ /dev/null @@ -1,112 +0,0 @@ -# Copyright 2023 Cloudbase Solutions Srl -# All Rights Reserved. - -from unittest import mock - -from coriolis.api.v1.views import replica_tasks_execution_view as view -from coriolis.api.v1.views import replica_view -from coriolis.api.v1.views import utils as view_utils -from coriolis.tests import test_base - - -class ReplicaViewTestCase(test_base.CoriolisApiViewsTestCase): - """Test suite for the Coriolis api v1 views.""" - - def setUp(self): - super(ReplicaViewTestCase, self).setUp() - self._format_fun = replica_view._format_replica - - @mock.patch.object(view, 'format_replica_tasks_execution') - @mock.patch.object(view_utils, 'format_opt') - def test_format_replica(self, mock_format_opt, - mock_format_replica_tasks_execution): - mock_format_opt.return_value = { - "executions": [{'id': 'mock_id1'}, {'id': 'mock_id2'}], - "mock_key": "mock_value" - } - - expected_calls = [ - mock.call.mock_format_replica_tasks_execution( - {'id': 'mock_id1'}), - mock.call.mock_format_replica_tasks_execution( - {'id': 'mock_id2'})] - expected_result = { - "executions": - [mock_format_replica_tasks_execution.return_value, - mock_format_replica_tasks_execution.return_value], - 'mock_key': 'mock_value' - } - - replica = 
mock.sentinel.replica - keys = mock.sentinel.keys - result = replica_view._format_replica(replica, keys) - - mock_format_opt.assert_called_once_with(replica, keys) - mock_format_replica_tasks_execution.assert_has_calls( - expected_calls - ) - self.assertEqual( - expected_result, - result - ) - - @mock.patch.object(view, 'format_replica_tasks_execution') - @mock.patch.object(view_utils, 'format_opt') - def test_format_replica_no_keys(self, mock_format_opt, - mock_format_replica_tasks_execution): - mock_format_opt.return_value = { - "executions": [{'id': 'mock_id1'}, {'id': 'mock_id2'}], - } - - expected_calls = [ - mock.call.mock_format_replica_tasks_execution( - {'id': 'mock_id1'}), - mock.call.mock_format_replica_tasks_execution( - {'id': 'mock_id2'})] - expected_result = { - "executions": - [mock_format_replica_tasks_execution.return_value, - mock_format_replica_tasks_execution.return_value], - } - - replica = mock.sentinel.replica - keys = mock.sentinel.keys - result = replica_view._format_replica(replica, keys) - - mock_format_opt.assert_called_once_with(replica, keys) - mock_format_replica_tasks_execution.assert_has_calls( - expected_calls - ) - self.assertEqual( - expected_result, - result - ) - - @mock.patch.object(view_utils, 'format_opt') - def test_format_replica_no_executions(self, mock_format_opt): - mock_format_opt.return_value = { - "mock_key": "mock_value" - } - - expected_result = { - 'executions': [], - 'mock_key': 'mock_value' - } - - replica = mock.sentinel.replica - keys = mock.sentinel.keys - result = replica_view._format_replica(replica, keys) - - mock_format_opt.assert_called_once_with(replica, keys) - self.assertEqual( - expected_result, - result - ) - - def test_single(self): - fun = getattr(replica_view, 'single') - self._single_view_test(fun, 'replica') - - def test_collection(self): - fun = getattr(replica_view, 'collection') - self._collection_view_test(fun, 'replicas') diff --git 
a/coriolis/tests/api/v1/views/test_replica_schedule_view.py b/coriolis/tests/api/v1/views/test_transfer_schedule_view.py similarity index 57% rename from coriolis/tests/api/v1/views/test_replica_schedule_view.py rename to coriolis/tests/api/v1/views/test_transfer_schedule_view.py index 3baffaad3..6f3e29443 100644 --- a/coriolis/tests/api/v1/views/test_replica_schedule_view.py +++ b/coriolis/tests/api/v1/views/test_transfer_schedule_view.py @@ -1,17 +1,17 @@ # Copyright 2023 Cloudbase Solutions Srl # All Rights Reserved. -from coriolis.api.v1.views import replica_schedule_view +from coriolis.api.v1.views import transfer_schedule_view from coriolis.tests import test_base -class ReplicaViewTestCase(test_base.CoriolisApiViewsTestCase): +class TransferViewTestCase(test_base.CoriolisApiViewsTestCase): """Test suite for the Coriolis api v1 views.""" def test_single(self): - fun = getattr(replica_schedule_view, 'single') + fun = getattr(transfer_schedule_view, 'single') self._single_view_test(fun, 'schedule') def test_collection(self): - fun = getattr(replica_schedule_view, 'collection') + fun = getattr(transfer_schedule_view, 'collection') self._collection_view_test(fun, 'schedules') diff --git a/coriolis/tests/api/v1/views/test_replica_task_execution_view.py b/coriolis/tests/api/v1/views/test_transfer_task_execution_view.py similarity index 88% rename from coriolis/tests/api/v1/views/test_replica_task_execution_view.py rename to coriolis/tests/api/v1/views/test_transfer_task_execution_view.py index 9e9dbfd91..4cf58ff51 100644 --- a/coriolis/tests/api/v1/views/test_replica_task_execution_view.py +++ b/coriolis/tests/api/v1/views/test_transfer_task_execution_view.py @@ -3,18 +3,18 @@ from unittest import mock -from coriolis.api.v1.views import replica_tasks_execution_view as view +from coriolis.api.v1.views import transfer_tasks_execution_view as view from coriolis.api.v1.views import utils as view_utils from coriolis import constants from coriolis.tests import test_base 
-class ReplicaTaskExecutionViewTestCase(test_base.CoriolisApiViewsTestCase): +class TransferTaskExecutionViewTestCase(test_base.CoriolisApiViewsTestCase): """Test suite for the Coriolis api v1 views.""" @mock.patch.object(view, '_sort_tasks') @mock.patch.object(view_utils, 'format_opt') - def test_format_replica_tasks_execution( + def test_format_transfer_tasks_execution( self, mock_format_opt, mock_sort_tasks @@ -27,7 +27,7 @@ def test_format_replica_tasks_execution( mock_sort_tasks.return_value = mock_execution keys = mock.sentinel.keys - result = view.format_replica_tasks_execution(mock_execution, keys) + result = view.format_transfer_tasks_execution(mock_execution, keys) mock_sort_tasks.assert_called_once_with(mock_tasks) mock_format_opt.assert_called_once_with(mock_execution["tasks"], keys) @@ -38,7 +38,7 @@ def test_format_replica_tasks_execution( @mock.patch.object(view, '_sort_tasks') @mock.patch.object(view_utils, 'format_opt') - def test_format_replica_tasks_execution_no_tasks( + def test_format_transfer_tasks_execution_no_tasks( self, mock_format_opt, mock_sort_tasks @@ -48,7 +48,7 @@ def test_format_replica_tasks_execution_no_tasks( } keys = mock.sentinel.keys - result = view.format_replica_tasks_execution(mock_execution, keys) + result = view.format_transfer_tasks_execution(mock_execution, keys) mock_sort_tasks.assert_not_called() mock_format_opt.assert_called_once_with(mock_execution, keys) diff --git a/coriolis/tests/api/v1/views/test_transfer_view.py b/coriolis/tests/api/v1/views/test_transfer_view.py new file mode 100644 index 000000000..cbcdc9570 --- /dev/null +++ b/coriolis/tests/api/v1/views/test_transfer_view.py @@ -0,0 +1,112 @@ +# Copyright 2023 Cloudbase Solutions Srl +# All Rights Reserved. 
+ +from unittest import mock + +from coriolis.api.v1.views import transfer_tasks_execution_view as view +from coriolis.api.v1.views import transfer_view +from coriolis.api.v1.views import utils as view_utils +from coriolis.tests import test_base + + +class TransferViewTestCase(test_base.CoriolisApiViewsTestCase): + """Test suite for the Coriolis api v1 views.""" + + def setUp(self): + super(TransferViewTestCase, self).setUp() + self._format_fun = transfer_view._format_transfer + + @mock.patch.object(view, 'format_transfer_tasks_execution') + @mock.patch.object(view_utils, 'format_opt') + def test_format_transfer(self, mock_format_opt, + mock_format_transfer_tasks_execution): + mock_format_opt.return_value = { + "executions": [{'id': 'mock_id1'}, {'id': 'mock_id2'}], + "mock_key": "mock_value" + } + + expected_calls = [ + mock.call.mock_format_transfer_tasks_execution( + {'id': 'mock_id1'}), + mock.call.mock_format_transfer_tasks_execution( + {'id': 'mock_id2'})] + expected_result = { + "executions": + [mock_format_transfer_tasks_execution.return_value, + mock_format_transfer_tasks_execution.return_value], + 'mock_key': 'mock_value' + } + + transfer = mock.sentinel.transfer + keys = mock.sentinel.keys + result = transfer_view._format_transfer(transfer, keys) + + mock_format_opt.assert_called_once_with(transfer, keys) + mock_format_transfer_tasks_execution.assert_has_calls( + expected_calls + ) + self.assertEqual( + expected_result, + result + ) + + @mock.patch.object(view, 'format_transfer_tasks_execution') + @mock.patch.object(view_utils, 'format_opt') + def test_format_transfer_no_keys(self, mock_format_opt, + mock_format_transfer_tasks_execution): + mock_format_opt.return_value = { + "executions": [{'id': 'mock_id1'}, {'id': 'mock_id2'}], + } + + expected_calls = [ + mock.call.mock_format_transfer_tasks_execution( + {'id': 'mock_id1'}), + mock.call.mock_format_transfer_tasks_execution( + {'id': 'mock_id2'})] + expected_result = { + "executions": + 
[mock_format_transfer_tasks_execution.return_value, + mock_format_transfer_tasks_execution.return_value], + } + + transfer = mock.sentinel.transfer + keys = mock.sentinel.keys + result = transfer_view._format_transfer(transfer, keys) + + mock_format_opt.assert_called_once_with(transfer, keys) + mock_format_transfer_tasks_execution.assert_has_calls( + expected_calls + ) + self.assertEqual( + expected_result, + result + ) + + @mock.patch.object(view_utils, 'format_opt') + def test_format_transfer_no_executions(self, mock_format_opt): + mock_format_opt.return_value = { + "mock_key": "mock_value" + } + + expected_result = { + 'executions': [], + 'mock_key': 'mock_value' + } + + transfer = mock.sentinel.transfer + keys = mock.sentinel.keys + result = transfer_view._format_transfer(transfer, keys) + + mock_format_opt.assert_called_once_with(transfer, keys) + self.assertEqual( + expected_result, + result + ) + + def test_single(self): + fun = getattr(transfer_view, 'single') + self._single_view_test(fun, 'transfer') + + def test_collection(self): + fun = getattr(transfer_view, 'collection') + self._collection_view_test(fun, 'transfers') diff --git a/coriolis/tests/cmd/test_replica_cron.py b/coriolis/tests/cmd/test_replica_cron.py index 7b5e49966..03097e279 100644 --- a/coriolis/tests/cmd/test_replica_cron.py +++ b/coriolis/tests/cmd/test_replica_cron.py @@ -4,41 +4,41 @@ import sys from unittest import mock -from coriolis.cmd import replica_cron +from coriolis.cmd import transfer_cron from coriolis import constants -from coriolis.replica_cron.rpc import server as rpc_server from coriolis import service from coriolis.tests import test_base +from coriolis.transfer_cron.rpc import server as rpc_server from coriolis import utils -class ReplicaCronTestCase(test_base.CoriolisBaseTestCase): - """Test suite for the Coriolis replica_cron CMD""" +class TransferCronTestCase(test_base.CoriolisBaseTestCase): + """Test suite for the Coriolis transfer_cron CMD""" 
@mock.patch.object(service, 'service') @mock.patch.object(service, 'MessagingService') - @mock.patch.object(rpc_server, 'ReplicaCronServerEndpoint') + @mock.patch.object(rpc_server, 'TransferCronServerEndpoint') @mock.patch.object(utils, 'setup_logging') - @mock.patch('coriolis.cmd.replica_cron.CONF') + @mock.patch('coriolis.cmd.transfer_cron.CONF') @mock.patch.object(sys, 'argv') def test_main( self, mock_argv, mock_conf, mock_setup_logging, - mock_ReplicaCronServerEndpoint, + mock_TransferCronServerEndpoint, mock_MessagingService, mock_service ): - replica_cron.main() + transfer_cron.main() mock_conf.assert_called_once_with( mock_argv[1:], project='coriolis', version="1.0.0") mock_setup_logging.assert_called_once() - mock_ReplicaCronServerEndpoint.assert_called_once() + mock_TransferCronServerEndpoint.assert_called_once() mock_MessagingService.assert_called_once_with( - constants.REPLICA_CRON_MAIN_MESSAGING_TOPIC, - [mock_ReplicaCronServerEndpoint.return_value], + constants.TRANSFER_CRON_MAIN_MESSAGING_TOPIC, + [mock_TransferCronServerEndpoint.return_value], rpc_server.VERSION, worker_count=1) mock_service.launch.assert_called_once_with( diff --git a/coriolis/tests/conductor/rpc/data/deploy_replica_instance_config.yml b/coriolis/tests/conductor/rpc/data/deploy_replica_instance_config.yml deleted file mode 100644 index ba0fc064b..000000000 --- a/coriolis/tests/conductor/rpc/data/deploy_replica_instance_config.yml +++ /dev/null @@ -1,182 +0,0 @@ -- config: - skip_os_morphing: False - has_os_morphing_minion: True - expected_tasks: - - type: 'VALIDATE_OSMORPHING_MINION_POOL_COMPATIBILITY' - depends_on: ['VALIDATE_REPLICA_DEPLOYMENT_INPUTS'] - - type: 'CREATE_REPLICA_DISK_SNAPSHOTS' - depends_on: ['VALIDATE_OSMORPHING_MINION_POOL_COMPATIBILITY'] - - type: 'DEPLOY_REPLICA_INSTANCE_RESOURCES' - depends_on: ['CREATE_REPLICA_DISK_SNAPSHOTS'] - - type: 'ATTACH_VOLUMES_TO_OSMORPHING_MINION' - depends_on: ['VALIDATE_OSMORPHING_MINION_POOL_COMPATIBILITY', 
'DEPLOY_REPLICA_INSTANCE_RESOURCES'] - - type: 'COLLECT_OS_MORPHING_INFO' - depends_on: ['ATTACH_VOLUMES_TO_OSMORPHING_MINION'] - - type: 'OS_MORPHING' - depends_on: ['COLLECT_OS_MORPHING_INFO'] - - type: 'DETACH_VOLUMES_FROM_OSMORPHING_MINION' - depends_on: ['ATTACH_VOLUMES_TO_OSMORPHING_MINION', 'OS_MORPHING'] - on_error: True - - type: 'RELEASE_OSMORPHING_MINION' - depends_on: ['VALIDATE_OSMORPHING_MINION_POOL_COMPATIBILITY', 'DETACH_VOLUMES_FROM_OSMORPHING_MINION'] - on_error: True - - type: 'FINALIZE_REPLICA_INSTANCE_DEPLOYMENT' - depends_on: ['OS_MORPHING', 'RELEASE_OSMORPHING_MINION'] - - type: 'DELETE_REPLICA_TARGET_DISK_SNAPSHOTS' - depends_on: ['CREATE_REPLICA_DISK_SNAPSHOTS', 'FINALIZE_REPLICA_INSTANCE_DEPLOYMENT'] - on_error: False - - type: 'CLEANUP_FAILED_REPLICA_INSTANCE_DEPLOYMENT' - depends_on: ['DEPLOY_REPLICA_INSTANCE_RESOURCES', 'FINALIZE_REPLICA_INSTANCE_DEPLOYMENT'] - on_error_only: True - - type: 'RESTORE_REPLICA_DISK_SNAPSHOTS' - depends_on: ['CLEANUP_FAILED_REPLICA_INSTANCE_DEPLOYMENT'] - on_error: True - -- config: - skip_os_morphing: False - has_os_morphing_minion: False - expected_tasks: - - type: 'CREATE_REPLICA_DISK_SNAPSHOTS' - depends_on: ['VALIDATE_REPLICA_DEPLOYMENT_INPUTS'] - - type: 'DEPLOY_REPLICA_INSTANCE_RESOURCES' - depends_on: ['CREATE_REPLICA_DISK_SNAPSHOTS'] - - type: 'DEPLOY_OS_MORPHING_RESOURCES' - depends_on: ['DEPLOY_REPLICA_INSTANCE_RESOURCES'] - - type: 'OS_MORPHING' - depends_on: ['DEPLOY_OS_MORPHING_RESOURCES'] - - type: 'DELETE_OS_MORPHING_RESOURCES' - depends_on: ['DEPLOY_OS_MORPHING_RESOURCES', 'OS_MORPHING'] - on_error: True - - type: 'FINALIZE_REPLICA_INSTANCE_DEPLOYMENT' - depends_on: ['OS_MORPHING', 'DELETE_OS_MORPHING_RESOURCES'] - - type: 'DELETE_REPLICA_TARGET_DISK_SNAPSHOTS' - depends_on: ['CREATE_REPLICA_DISK_SNAPSHOTS', 'FINALIZE_REPLICA_INSTANCE_DEPLOYMENT'] - on_error: False - - type: 'CLEANUP_FAILED_REPLICA_INSTANCE_DEPLOYMENT' - depends_on: ['DEPLOY_REPLICA_INSTANCE_RESOURCES', 
'FINALIZE_REPLICA_INSTANCE_DEPLOYMENT'] - on_error_only: True - - type: 'RESTORE_REPLICA_DISK_SNAPSHOTS' - depends_on: ['CLEANUP_FAILED_REPLICA_INSTANCE_DEPLOYMENT'] - on_error: True - -- config: - skip_os_morphing: True - expected_tasks: - - type: 'CREATE_REPLICA_DISK_SNAPSHOTS' - depends_on: ['VALIDATE_REPLICA_DEPLOYMENT_INPUTS'] - - type: 'DEPLOY_REPLICA_INSTANCE_RESOURCES' - depends_on: ['CREATE_REPLICA_DISK_SNAPSHOTS'] - - type: 'FINALIZE_REPLICA_INSTANCE_DEPLOYMENT' - depends_on: ['DEPLOY_REPLICA_INSTANCE_RESOURCES'] - - type: 'DELETE_REPLICA_TARGET_DISK_SNAPSHOTS' - depends_on: ['CREATE_REPLICA_DISK_SNAPSHOTS', 'FINALIZE_REPLICA_INSTANCE_DEPLOYMENT'] - on_error: False - - type: 'CLEANUP_FAILED_REPLICA_INSTANCE_DEPLOYMENT' - depends_on: ['DEPLOY_REPLICA_INSTANCE_RESOURCES', 'FINALIZE_REPLICA_INSTANCE_DEPLOYMENT'] - on_error_only: True - - type: 'RESTORE_REPLICA_DISK_SNAPSHOTS' - depends_on: ['CLEANUP_FAILED_REPLICA_INSTANCE_DEPLOYMENT'] - on_error: True - -- config: - skip_os_morphing: True - get_optimal_flavor: True - expected_tasks: - - type: 'CREATE_REPLICA_DISK_SNAPSHOTS' - depends_on: ['VALIDATE_REPLICA_DEPLOYMENT_INPUTS'] - - type: 'DEPLOY_REPLICA_INSTANCE_RESOURCES' - depends_on: ['CREATE_REPLICA_DISK_SNAPSHOTS'] - - type: 'GET_OPTIMAL_FLAVOR' - depends_on: ['DEPLOY_REPLICA_INSTANCE_RESOURCES'] - - type: 'FINALIZE_REPLICA_INSTANCE_DEPLOYMENT' - depends_on: ['GET_OPTIMAL_FLAVOR'] - - type: 'DELETE_REPLICA_TARGET_DISK_SNAPSHOTS' - depends_on: ['CREATE_REPLICA_DISK_SNAPSHOTS', 'FINALIZE_REPLICA_INSTANCE_DEPLOYMENT'] - on_error: False - - type: 'CLEANUP_FAILED_REPLICA_INSTANCE_DEPLOYMENT' - depends_on: ['DEPLOY_REPLICA_INSTANCE_RESOURCES', 'FINALIZE_REPLICA_INSTANCE_DEPLOYMENT'] - on_error_only: True - - type: 'RESTORE_REPLICA_DISK_SNAPSHOTS' - depends_on: ['CLEANUP_FAILED_REPLICA_INSTANCE_DEPLOYMENT'] - on_error: True - -- config: - skip_os_morphing: True - clone_disks: True - expected_tasks: - - type: 'CREATE_REPLICA_DISK_SNAPSHOTS' - depends_on: 
['VALIDATE_REPLICA_DEPLOYMENT_INPUTS'] - - type: 'DEPLOY_REPLICA_INSTANCE_RESOURCES' - depends_on: ['CREATE_REPLICA_DISK_SNAPSHOTS'] - - type: 'FINALIZE_REPLICA_INSTANCE_DEPLOYMENT' - depends_on: ['DEPLOY_REPLICA_INSTANCE_RESOURCES'] - - type: 'DELETE_REPLICA_TARGET_DISK_SNAPSHOTS' - depends_on: ['CREATE_REPLICA_DISK_SNAPSHOTS', 'FINALIZE_REPLICA_INSTANCE_DEPLOYMENT'] - on_error: True - - type: 'CLEANUP_FAILED_REPLICA_INSTANCE_DEPLOYMENT' - depends_on: ['DEPLOY_REPLICA_INSTANCE_RESOURCES', 'FINALIZE_REPLICA_INSTANCE_DEPLOYMENT'] - on_error_only: True - -- config: - get_optimal_flavor: True - skip_os_morphing: False - expected_tasks: - - type: CREATE_REPLICA_DISK_SNAPSHOTS - depends_on: ['VALIDATE_REPLICA_DEPLOYMENT_INPUTS'] - - type: DEPLOY_REPLICA_INSTANCE_RESOURCES - depends_on: ['CREATE_REPLICA_DISK_SNAPSHOTS'] - - type: DEPLOY_OS_MORPHING_RESOURCES - depends_on: ['DEPLOY_REPLICA_INSTANCE_RESOURCES'] - - type: OS_MORPHING - depends_on: ['DEPLOY_OS_MORPHING_RESOURCES'] - - type: DELETE_OS_MORPHING_RESOURCES - depends_on: ['DEPLOY_OS_MORPHING_RESOURCES', 'OS_MORPHING'] - on_error: True - - type: GET_OPTIMAL_FLAVOR - depends_on: ['OS_MORPHING', 'DELETE_OS_MORPHING_RESOURCES'] - - type: FINALIZE_REPLICA_INSTANCE_DEPLOYMENT - depends_on: ['GET_OPTIMAL_FLAVOR'] - - type: DELETE_REPLICA_TARGET_DISK_SNAPSHOTS - depends_on: ['CREATE_REPLICA_DISK_SNAPSHOTS', 'FINALIZE_REPLICA_INSTANCE_DEPLOYMENT'] - on_error: False - - type: CLEANUP_FAILED_REPLICA_INSTANCE_DEPLOYMENT - depends_on: ['DEPLOY_REPLICA_INSTANCE_RESOURCES', 'FINALIZE_REPLICA_INSTANCE_DEPLOYMENT'] - on_error_only: True - - type: RESTORE_REPLICA_DISK_SNAPSHOTS - depends_on: ['CLEANUP_FAILED_REPLICA_INSTANCE_DEPLOYMENT'] - on_error: True - -- config: - get_optimal_flavor: True - skip_os_morphing: False - has_os_morphing_minion: True - expected_tasks: - - type: VALIDATE_OSMORPHING_MINION_POOL_COMPATIBILITY - depends_on: ['VALIDATE_REPLICA_DEPLOYMENT_INPUTS'] - - type: DEPLOY_REPLICA_INSTANCE_RESOURCES - depends_on: 
['CREATE_REPLICA_DISK_SNAPSHOTS'] - - type: ATTACH_VOLUMES_TO_OSMORPHING_MINION - depends_on: ['VALIDATE_OSMORPHING_MINION_POOL_COMPATIBILITY', 'DEPLOY_REPLICA_INSTANCE_RESOURCES'] - - type: COLLECT_OS_MORPHING_INFO - depends_on: ['ATTACH_VOLUMES_TO_OSMORPHING_MINION'] - - type: OS_MORPHING - depends_on: ['COLLECT_OS_MORPHING_INFO'] - - type: DETACH_VOLUMES_FROM_OSMORPHING_MINION - depends_on: ['ATTACH_VOLUMES_TO_OSMORPHING_MINION', 'OS_MORPHING'] - on_error: True - - type: RELEASE_OSMORPHING_MINION - depends_on: ['VALIDATE_OSMORPHING_MINION_POOL_COMPATIBILITY', 'DETACH_VOLUMES_FROM_OSMORPHING_MINION'] - on_error: True - - type: GET_OPTIMAL_FLAVOR - depends_on: ['OS_MORPHING', 'RELEASE_OSMORPHING_MINION'] - - type: FINALIZE_REPLICA_INSTANCE_DEPLOYMENT - depends_on: ['GET_OPTIMAL_FLAVOR'] - - type: DELETE_REPLICA_TARGET_DISK_SNAPSHOTS - depends_on: ['CREATE_REPLICA_DISK_SNAPSHOTS', 'FINALIZE_REPLICA_INSTANCE_DEPLOYMENT'] - on_error: False - - type: CLEANUP_FAILED_REPLICA_INSTANCE_DEPLOYMENT - depends_on: ['DEPLOY_REPLICA_INSTANCE_RESOURCES', 'FINALIZE_REPLICA_INSTANCE_DEPLOYMENT'] - on_error_only: True - - type: RESTORE_REPLICA_DISK_SNAPSHOTS - depends_on: ['CLEANUP_FAILED_REPLICA_INSTANCE_DEPLOYMENT'] - on_error: True diff --git a/coriolis/tests/conductor/rpc/data/deploy_transfer_instance_config.yml b/coriolis/tests/conductor/rpc/data/deploy_transfer_instance_config.yml new file mode 100644 index 000000000..b3baa5cf5 --- /dev/null +++ b/coriolis/tests/conductor/rpc/data/deploy_transfer_instance_config.yml @@ -0,0 +1,182 @@ +- config: + skip_os_morphing: False + has_os_morphing_minion: True + expected_tasks: + - type: 'VALIDATE_OSMORPHING_MINION_POOL_COMPATIBILITY' + depends_on: ['VALIDATE_DEPLOYMENT_INPUTS'] + - type: 'CREATE_TRANSFER_DISK_SNAPSHOTS' + depends_on: ['VALIDATE_OSMORPHING_MINION_POOL_COMPATIBILITY'] + - type: 'DEPLOY_INSTANCE_RESOURCES' + depends_on: ['CREATE_TRANSFER_DISK_SNAPSHOTS'] + - type: 'ATTACH_VOLUMES_TO_OSMORPHING_MINION' + depends_on: 
['VALIDATE_OSMORPHING_MINION_POOL_COMPATIBILITY', 'DEPLOY_INSTANCE_RESOURCES'] + - type: 'COLLECT_OS_MORPHING_INFO' + depends_on: ['ATTACH_VOLUMES_TO_OSMORPHING_MINION'] + - type: 'OS_MORPHING' + depends_on: ['COLLECT_OS_MORPHING_INFO'] + - type: 'DETACH_VOLUMES_FROM_OSMORPHING_MINION' + depends_on: ['ATTACH_VOLUMES_TO_OSMORPHING_MINION', 'OS_MORPHING'] + on_error: True + - type: 'RELEASE_OSMORPHING_MINION' + depends_on: ['VALIDATE_OSMORPHING_MINION_POOL_COMPATIBILITY', 'DETACH_VOLUMES_FROM_OSMORPHING_MINION'] + on_error: True + - type: 'FINALIZE_INSTANCE_DEPLOYMENT' + depends_on: ['OS_MORPHING', 'RELEASE_OSMORPHING_MINION'] + - type: 'DELETE_TRANSFER_TARGET_DISK_SNAPSHOTS' + depends_on: ['CREATE_TRANSFER_DISK_SNAPSHOTS', 'FINALIZE_INSTANCE_DEPLOYMENT'] + on_error: False + - type: 'CLEANUP_FAILED_INSTANCE_DEPLOYMENT' + depends_on: ['DEPLOY_INSTANCE_RESOURCES', 'FINALIZE_INSTANCE_DEPLOYMENT'] + on_error_only: True + - type: 'RESTORE_TRANSFER_DISK_SNAPSHOTS' + depends_on: ['CLEANUP_FAILED_INSTANCE_DEPLOYMENT'] + on_error: True + +- config: + skip_os_morphing: False + has_os_morphing_minion: False + expected_tasks: + - type: 'CREATE_TRANSFER_DISK_SNAPSHOTS' + depends_on: ['VALIDATE_DEPLOYMENT_INPUTS'] + - type: 'DEPLOY_INSTANCE_RESOURCES' + depends_on: ['CREATE_TRANSFER_DISK_SNAPSHOTS'] + - type: 'DEPLOY_OS_MORPHING_RESOURCES' + depends_on: ['DEPLOY_INSTANCE_RESOURCES'] + - type: 'OS_MORPHING' + depends_on: ['DEPLOY_OS_MORPHING_RESOURCES'] + - type: 'DELETE_OS_MORPHING_RESOURCES' + depends_on: ['DEPLOY_OS_MORPHING_RESOURCES', 'OS_MORPHING'] + on_error: True + - type: 'FINALIZE_INSTANCE_DEPLOYMENT' + depends_on: ['OS_MORPHING', 'DELETE_OS_MORPHING_RESOURCES'] + - type: 'DELETE_TRANSFER_TARGET_DISK_SNAPSHOTS' + depends_on: ['CREATE_TRANSFER_DISK_SNAPSHOTS', 'FINALIZE_INSTANCE_DEPLOYMENT'] + on_error: False + - type: 'CLEANUP_FAILED_INSTANCE_DEPLOYMENT' + depends_on: ['DEPLOY_INSTANCE_RESOURCES', 'FINALIZE_INSTANCE_DEPLOYMENT'] + on_error_only: True + - type: 
'RESTORE_TRANSFER_DISK_SNAPSHOTS' + depends_on: ['CLEANUP_FAILED_INSTANCE_DEPLOYMENT'] + on_error: True + +- config: + skip_os_morphing: True + expected_tasks: + - type: 'CREATE_TRANSFER_DISK_SNAPSHOTS' + depends_on: ['VALIDATE_DEPLOYMENT_INPUTS'] + - type: 'DEPLOY_INSTANCE_RESOURCES' + depends_on: ['CREATE_TRANSFER_DISK_SNAPSHOTS'] + - type: 'FINALIZE_INSTANCE_DEPLOYMENT' + depends_on: ['DEPLOY_INSTANCE_RESOURCES'] + - type: 'DELETE_TRANSFER_TARGET_DISK_SNAPSHOTS' + depends_on: ['CREATE_TRANSFER_DISK_SNAPSHOTS', 'FINALIZE_INSTANCE_DEPLOYMENT'] + on_error: False + - type: 'CLEANUP_FAILED_INSTANCE_DEPLOYMENT' + depends_on: ['DEPLOY_INSTANCE_RESOURCES', 'FINALIZE_INSTANCE_DEPLOYMENT'] + on_error_only: True + - type: 'RESTORE_TRANSFER_DISK_SNAPSHOTS' + depends_on: ['CLEANUP_FAILED_INSTANCE_DEPLOYMENT'] + on_error: True + +- config: + skip_os_morphing: True + get_optimal_flavor: True + expected_tasks: + - type: 'CREATE_TRANSFER_DISK_SNAPSHOTS' + depends_on: ['VALIDATE_DEPLOYMENT_INPUTS'] + - type: 'DEPLOY_INSTANCE_RESOURCES' + depends_on: ['CREATE_TRANSFER_DISK_SNAPSHOTS'] + - type: 'GET_OPTIMAL_FLAVOR' + depends_on: ['DEPLOY_INSTANCE_RESOURCES'] + - type: 'FINALIZE_INSTANCE_DEPLOYMENT' + depends_on: ['GET_OPTIMAL_FLAVOR'] + - type: 'DELETE_TRANSFER_TARGET_DISK_SNAPSHOTS' + depends_on: ['CREATE_TRANSFER_DISK_SNAPSHOTS', 'FINALIZE_INSTANCE_DEPLOYMENT'] + on_error: False + - type: 'CLEANUP_FAILED_INSTANCE_DEPLOYMENT' + depends_on: ['DEPLOY_INSTANCE_RESOURCES', 'FINALIZE_INSTANCE_DEPLOYMENT'] + on_error_only: True + - type: 'RESTORE_TRANSFER_DISK_SNAPSHOTS' + depends_on: ['CLEANUP_FAILED_INSTANCE_DEPLOYMENT'] + on_error: True + +- config: + skip_os_morphing: True + clone_disks: True + expected_tasks: + - type: 'CREATE_TRANSFER_DISK_SNAPSHOTS' + depends_on: ['VALIDATE_DEPLOYMENT_INPUTS'] + - type: 'DEPLOY_INSTANCE_RESOURCES' + depends_on: ['CREATE_TRANSFER_DISK_SNAPSHOTS'] + - type: 'FINALIZE_INSTANCE_DEPLOYMENT' + depends_on: ['DEPLOY_INSTANCE_RESOURCES'] + - type: 
'DELETE_TRANSFER_TARGET_DISK_SNAPSHOTS' + depends_on: ['CREATE_TRANSFER_DISK_SNAPSHOTS', 'FINALIZE_INSTANCE_DEPLOYMENT'] + on_error: True + - type: 'CLEANUP_FAILED_INSTANCE_DEPLOYMENT' + depends_on: ['DEPLOY_INSTANCE_RESOURCES', 'FINALIZE_INSTANCE_DEPLOYMENT'] + on_error_only: True + +- config: + get_optimal_flavor: True + skip_os_morphing: False + expected_tasks: + - type: CREATE_TRANSFER_DISK_SNAPSHOTS + depends_on: ['VALIDATE_DEPLOYMENT_INPUTS'] + - type: DEPLOY_INSTANCE_RESOURCES + depends_on: ['CREATE_TRANSFER_DISK_SNAPSHOTS'] + - type: DEPLOY_OS_MORPHING_RESOURCES + depends_on: ['DEPLOY_INSTANCE_RESOURCES'] + - type: OS_MORPHING + depends_on: ['DEPLOY_OS_MORPHING_RESOURCES'] + - type: DELETE_OS_MORPHING_RESOURCES + depends_on: ['DEPLOY_OS_MORPHING_RESOURCES', 'OS_MORPHING'] + on_error: True + - type: GET_OPTIMAL_FLAVOR + depends_on: ['OS_MORPHING', 'DELETE_OS_MORPHING_RESOURCES'] + - type: FINALIZE_INSTANCE_DEPLOYMENT + depends_on: ['GET_OPTIMAL_FLAVOR'] + - type: DELETE_TRANSFER_TARGET_DISK_SNAPSHOTS + depends_on: ['CREATE_TRANSFER_DISK_SNAPSHOTS', 'FINALIZE_INSTANCE_DEPLOYMENT'] + on_error: False + - type: CLEANUP_FAILED_INSTANCE_DEPLOYMENT + depends_on: ['DEPLOY_INSTANCE_RESOURCES', 'FINALIZE_INSTANCE_DEPLOYMENT'] + on_error_only: True + - type: RESTORE_TRANSFER_DISK_SNAPSHOTS + depends_on: ['CLEANUP_FAILED_INSTANCE_DEPLOYMENT'] + on_error: True + +- config: + get_optimal_flavor: True + skip_os_morphing: False + has_os_morphing_minion: True + expected_tasks: + - type: VALIDATE_OSMORPHING_MINION_POOL_COMPATIBILITY + depends_on: ['VALIDATE_DEPLOYMENT_INPUTS'] + - type: DEPLOY_INSTANCE_RESOURCES + depends_on: ['CREATE_TRANSFER_DISK_SNAPSHOTS'] + - type: ATTACH_VOLUMES_TO_OSMORPHING_MINION + depends_on: ['VALIDATE_OSMORPHING_MINION_POOL_COMPATIBILITY', 'DEPLOY_INSTANCE_RESOURCES'] + - type: COLLECT_OS_MORPHING_INFO + depends_on: ['ATTACH_VOLUMES_TO_OSMORPHING_MINION'] + - type: OS_MORPHING + depends_on: ['COLLECT_OS_MORPHING_INFO'] + - type: 
DETACH_VOLUMES_FROM_OSMORPHING_MINION + depends_on: ['ATTACH_VOLUMES_TO_OSMORPHING_MINION', 'OS_MORPHING'] + on_error: True + - type: RELEASE_OSMORPHING_MINION + depends_on: ['VALIDATE_OSMORPHING_MINION_POOL_COMPATIBILITY', 'DETACH_VOLUMES_FROM_OSMORPHING_MINION'] + on_error: True + - type: GET_OPTIMAL_FLAVOR + depends_on: ['OS_MORPHING', 'RELEASE_OSMORPHING_MINION'] + - type: FINALIZE_INSTANCE_DEPLOYMENT + depends_on: ['GET_OPTIMAL_FLAVOR'] + - type: DELETE_TRANSFER_TARGET_DISK_SNAPSHOTS + depends_on: ['CREATE_TRANSFER_DISK_SNAPSHOTS', 'FINALIZE_INSTANCE_DEPLOYMENT'] + on_error: False + - type: CLEANUP_FAILED_INSTANCE_DEPLOYMENT + depends_on: ['DEPLOY_INSTANCE_RESOURCES', 'FINALIZE_INSTANCE_DEPLOYMENT'] + on_error_only: True + - type: RESTORE_TRANSFER_DISK_SNAPSHOTS + depends_on: ['CLEANUP_FAILED_INSTANCE_DEPLOYMENT'] + on_error: True diff --git a/coriolis/tests/conductor/rpc/data/execute_replica_tasks_config.yml b/coriolis/tests/conductor/rpc/data/execute_transfer_tasks_config.yml similarity index 57% rename from coriolis/tests/conductor/rpc/data/execute_replica_tasks_config.yml rename to coriolis/tests/conductor/rpc/data/execute_transfer_tasks_config.yml index 5d55b175c..e4093ea3d 100644 --- a/coriolis/tests/conductor/rpc/data/execute_replica_tasks_config.yml +++ b/coriolis/tests/conductor/rpc/data/execute_transfer_tasks_config.yml @@ -5,24 +5,24 @@ shutdown_instances: False expected_tasks: - - type: 'DEPLOY_REPLICA_DISKS' - depends_on: ['VALIDATE_REPLICA_SOURCE_INPUTS', 'VALIDATE_REPLICA_DESTINATION_INPUTS'] + type: 'DEPLOY_TRANSFER_DISKS' + depends_on: ['VALIDATE_TRANSFER_SOURCE_INPUTS', 'VALIDATE_TRANSFER_DESTINATION_INPUTS'] - - type: 'DEPLOY_REPLICA_SOURCE_RESOURCES' - depends_on: ['DEPLOY_REPLICA_DISKS'] + type: 'DEPLOY_TRANSFER_SOURCE_RESOURCES' + depends_on: ['DEPLOY_TRANSFER_DISKS'] - - type: 'DEPLOY_REPLICA_TARGET_RESOURCES' - depends_on: ['DEPLOY_REPLICA_DISKS'] + type: 'DEPLOY_TRANSFER_TARGET_RESOURCES' + depends_on: ['DEPLOY_TRANSFER_DISKS'] - type: 
'REPLICATE_DISKS' - depends_on: ['DEPLOY_REPLICA_SOURCE_RESOURCES', 'DEPLOY_REPLICA_TARGET_RESOURCES'] + depends_on: ['DEPLOY_TRANSFER_SOURCE_RESOURCES', 'DEPLOY_TRANSFER_TARGET_RESOURCES'] - - type: 'DELETE_REPLICA_SOURCE_RESOURCES' - depends_on: ['DEPLOY_REPLICA_SOURCE_RESOURCES', 'REPLICATE_DISKS'] + type: 'DELETE_TRANSFER_SOURCE_RESOURCES' + depends_on: ['DEPLOY_TRANSFER_SOURCE_RESOURCES', 'REPLICATE_DISKS'] on_error: True - - type: 'DELETE_REPLICA_TARGET_RESOURCES' - depends_on: ['DEPLOY_REPLICA_TARGET_RESOURCES', 'REPLICATE_DISKS'] + type: 'DELETE_TRANSFER_TARGET_RESOURCES' + depends_on: ['DEPLOY_TRANSFER_TARGET_RESOURCES', 'REPLICATE_DISKS'] on_error: True - config: @@ -32,23 +32,23 @@ expected_tasks: - type: 'VALIDATE_DESTINATION_MINION_POOL_COMPATIBILITY' - depends_on: ['VALIDATE_REPLICA_DESTINATION_INPUTS'] + depends_on: ['VALIDATE_TRANSFER_DESTINATION_INPUTS'] - - type: 'DEPLOY_REPLICA_DISKS' - depends_on: ['VALIDATE_REPLICA_SOURCE_INPUTS', 'VALIDATE_DESTINATION_MINION_POOL_COMPATIBILITY'] + type: 'DEPLOY_TRANSFER_DISKS' + depends_on: ['VALIDATE_TRANSFER_SOURCE_INPUTS', 'VALIDATE_DESTINATION_MINION_POOL_COMPATIBILITY'] - - type: 'DELETE_REPLICA_SOURCE_RESOURCES' - depends_on: ['DEPLOY_REPLICA_SOURCE_RESOURCES', 'REPLICATE_DISKS'] + type: 'DELETE_TRANSFER_SOURCE_RESOURCES' + depends_on: ['DEPLOY_TRANSFER_SOURCE_RESOURCES', 'REPLICATE_DISKS'] on_error: True - type: 'ATTACH_VOLUMES_TO_DESTINATION_MINION' - depends_on: ['DEPLOY_REPLICA_DISKS'] + depends_on: ['DEPLOY_TRANSFER_DISKS'] - type: 'REPLICATE_DISKS' - depends_on: ['DEPLOY_REPLICA_SOURCE_RESOURCES', 'ATTACH_VOLUMES_TO_DESTINATION_MINION'] + depends_on: ['DEPLOY_TRANSFER_SOURCE_RESOURCES', 'ATTACH_VOLUMES_TO_DESTINATION_MINION'] - - type: 'DEPLOY_REPLICA_SOURCE_RESOURCES' - depends_on: ['DEPLOY_REPLICA_DISKS'] + type: 'DEPLOY_TRANSFER_SOURCE_RESOURCES' + depends_on: ['DEPLOY_TRANSFER_DISKS'] - type: 'DETACH_VOLUMES_FROM_DESTINATION_MINION' depends_on: ['ATTACH_VOLUMES_TO_DESTINATION_MINION', 
'REPLICATE_DISKS'] @@ -57,7 +57,7 @@ type: 'RELEASE_DESTINATION_MINION' depends_on: ['VALIDATE_DESTINATION_MINION_POOL_COMPATIBILITY', 'DETACH_VOLUMES_FROM_DESTINATION_MINION'] on_error: True - + - config: origin_minion_pool: True @@ -66,23 +66,23 @@ expected_tasks: - type: 'VALIDATE_SOURCE_MINION_POOL_COMPATIBILITY' - depends_on: ['GET_INSTANCE_INFO', 'VALIDATE_REPLICA_SOURCE_INPUTS'] + depends_on: ['GET_INSTANCE_INFO', 'VALIDATE_TRANSFER_SOURCE_INPUTS'] - - type: 'DEPLOY_REPLICA_DISKS' - depends_on: ['VALIDATE_SOURCE_MINION_POOL_COMPATIBILITY', 'VALIDATE_REPLICA_DESTINATION_INPUTS'] + type: 'DEPLOY_TRANSFER_DISKS' + depends_on: ['VALIDATE_SOURCE_MINION_POOL_COMPATIBILITY', 'VALIDATE_TRANSFER_DESTINATION_INPUTS'] - - type: 'DEPLOY_REPLICA_TARGET_RESOURCES' - depends_on: ['DEPLOY_REPLICA_DISKS'] + type: 'DEPLOY_TRANSFER_TARGET_RESOURCES' + depends_on: ['DEPLOY_TRANSFER_DISKS'] - type: 'REPLICATE_DISKS' - depends_on: ['DEPLOY_REPLICA_TARGET_RESOURCES'] + depends_on: ['DEPLOY_TRANSFER_TARGET_RESOURCES'] - type: 'RELEASE_SOURCE_MINION' depends_on: ['VALIDATE_SOURCE_MINION_POOL_COMPATIBILITY', 'REPLICATE_DISKS'] on_error: True - - type: 'DELETE_REPLICA_TARGET_RESOURCES' - depends_on: ['DEPLOY_REPLICA_TARGET_RESOURCES', 'REPLICATE_DISKS'] + type: 'DELETE_TRANSFER_TARGET_RESOURCES' + depends_on: ['DEPLOY_TRANSFER_TARGET_RESOURCES', 'REPLICATE_DISKS'] on_error: True - config: @@ -92,16 +92,16 @@ expected_tasks: - type: 'VALIDATE_SOURCE_MINION_POOL_COMPATIBILITY' - depends_on: ['GET_INSTANCE_INFO', 'VALIDATE_REPLICA_SOURCE_INPUTS'] + depends_on: ['GET_INSTANCE_INFO', 'VALIDATE_TRANSFER_SOURCE_INPUTS'] - type: 'VALIDATE_DESTINATION_MINION_POOL_COMPATIBILITY' - depends_on: ['VALIDATE_REPLICA_DESTINATION_INPUTS'] + depends_on: ['VALIDATE_TRANSFER_DESTINATION_INPUTS'] - - type: 'DEPLOY_REPLICA_DISKS' + type: 'DEPLOY_TRANSFER_DISKS' depends_on: ['VALIDATE_SOURCE_MINION_POOL_COMPATIBILITY', 'VALIDATE_DESTINATION_MINION_POOL_COMPATIBILITY'] - type: 
'ATTACH_VOLUMES_TO_DESTINATION_MINION' - depends_on: ['DEPLOY_REPLICA_DISKS'] + depends_on: ['DEPLOY_TRANSFER_DISKS'] - type: 'REPLICATE_DISKS' depends_on: ['ATTACH_VOLUMES_TO_DESTINATION_MINION'] @@ -124,27 +124,27 @@ shutdown_instances: True expected_tasks: - - type: 'DEPLOY_REPLICA_DISKS' - depends_on: ['VALIDATE_REPLICA_SOURCE_INPUTS', 'VALIDATE_REPLICA_DESTINATION_INPUTS'] + type: 'DEPLOY_TRANSFER_DISKS' + depends_on: ['VALIDATE_TRANSFER_SOURCE_INPUTS', 'VALIDATE_TRANSFER_DESTINATION_INPUTS'] - - type: 'DEPLOY_REPLICA_SOURCE_RESOURCES' - depends_on: ['DEPLOY_REPLICA_DISKS'] + type: 'DEPLOY_TRANSFER_SOURCE_RESOURCES' + depends_on: ['DEPLOY_TRANSFER_DISKS'] - - type: 'DEPLOY_REPLICA_TARGET_RESOURCES' - depends_on: ['DEPLOY_REPLICA_DISKS'] + type: 'DEPLOY_TRANSFER_TARGET_RESOURCES' + depends_on: ['DEPLOY_TRANSFER_DISKS'] - type: 'SHUTDOWN_INSTANCE' - depends_on: ['DEPLOY_REPLICA_SOURCE_RESOURCES', 'DEPLOY_REPLICA_TARGET_RESOURCES'] + depends_on: ['DEPLOY_TRANSFER_SOURCE_RESOURCES', 'DEPLOY_TRANSFER_TARGET_RESOURCES'] - type: 'REPLICATE_DISKS' depends_on: ['SHUTDOWN_INSTANCE'] - - type: 'DELETE_REPLICA_SOURCE_RESOURCES' - depends_on: ['DEPLOY_REPLICA_SOURCE_RESOURCES', 'REPLICATE_DISKS'] + type: 'DELETE_TRANSFER_SOURCE_RESOURCES' + depends_on: ['DEPLOY_TRANSFER_SOURCE_RESOURCES', 'REPLICATE_DISKS'] on_error: True - - type: 'DELETE_REPLICA_TARGET_RESOURCES' - depends_on: ['DEPLOY_REPLICA_TARGET_RESOURCES', 'REPLICATE_DISKS'] + type: 'DELETE_TRANSFER_TARGET_RESOURCES' + depends_on: ['DEPLOY_TRANSFER_TARGET_RESOURCES', 'REPLICATE_DISKS'] on_error: True - config: @@ -154,25 +154,25 @@ expected_tasks: - type: 'VALIDATE_DESTINATION_MINION_POOL_COMPATIBILITY' - depends_on: ['VALIDATE_REPLICA_DESTINATION_INPUTS'] + depends_on: ['VALIDATE_TRANSFER_DESTINATION_INPUTS'] - - type: 'DEPLOY_REPLICA_DISKS' - depends_on: ['VALIDATE_REPLICA_SOURCE_INPUTS', 'VALIDATE_DESTINATION_MINION_POOL_COMPATIBILITY'] + type: 'DEPLOY_TRANSFER_DISKS' + depends_on: 
['VALIDATE_TRANSFER_SOURCE_INPUTS', 'VALIDATE_DESTINATION_MINION_POOL_COMPATIBILITY'] - - type: 'DEPLOY_REPLICA_SOURCE_RESOURCES' - depends_on: ['DEPLOY_REPLICA_DISKS'] + type: 'DEPLOY_TRANSFER_SOURCE_RESOURCES' + depends_on: ['DEPLOY_TRANSFER_DISKS'] - type: 'ATTACH_VOLUMES_TO_DESTINATION_MINION' - depends_on: ['DEPLOY_REPLICA_DISKS'] + depends_on: ['DEPLOY_TRANSFER_DISKS'] - type: 'SHUTDOWN_INSTANCE' - depends_on: ['DEPLOY_REPLICA_SOURCE_RESOURCES', 'ATTACH_VOLUMES_TO_DESTINATION_MINION'] + depends_on: ['DEPLOY_TRANSFER_SOURCE_RESOURCES', 'ATTACH_VOLUMES_TO_DESTINATION_MINION'] - type: 'REPLICATE_DISKS' depends_on: ['SHUTDOWN_INSTANCE'] - - type: 'DELETE_REPLICA_SOURCE_RESOURCES' - depends_on: ['DEPLOY_REPLICA_SOURCE_RESOURCES', 'REPLICATE_DISKS'] + type: 'DELETE_TRANSFER_SOURCE_RESOURCES' + depends_on: ['DEPLOY_TRANSFER_SOURCE_RESOURCES', 'REPLICATE_DISKS'] on_error: True - type: 'DETACH_VOLUMES_FROM_DESTINATION_MINION' @@ -190,16 +190,16 @@ expected_tasks: - type: 'VALIDATE_SOURCE_MINION_POOL_COMPATIBILITY' - depends_on: ['GET_INSTANCE_INFO', 'VALIDATE_REPLICA_SOURCE_INPUTS'] + depends_on: ['GET_INSTANCE_INFO', 'VALIDATE_TRANSFER_SOURCE_INPUTS'] - - type: 'DEPLOY_REPLICA_DISKS' - depends_on: ['VALIDATE_SOURCE_MINION_POOL_COMPATIBILITY', 'VALIDATE_REPLICA_DESTINATION_INPUTS'] + type: 'DEPLOY_TRANSFER_DISKS' + depends_on: ['VALIDATE_SOURCE_MINION_POOL_COMPATIBILITY', 'VALIDATE_TRANSFER_DESTINATION_INPUTS'] - - type: 'DEPLOY_REPLICA_TARGET_RESOURCES' - depends_on: ['DEPLOY_REPLICA_DISKS'] + type: 'DEPLOY_TRANSFER_TARGET_RESOURCES' + depends_on: ['DEPLOY_TRANSFER_DISKS'] - type: 'SHUTDOWN_INSTANCE' - depends_on: ['DEPLOY_REPLICA_TARGET_RESOURCES'] + depends_on: ['DEPLOY_TRANSFER_TARGET_RESOURCES'] - type: 'REPLICATE_DISKS' depends_on: ['SHUTDOWN_INSTANCE'] @@ -208,8 +208,8 @@ depends_on: ['VALIDATE_SOURCE_MINION_POOL_COMPATIBILITY', 'REPLICATE_DISKS'] on_error: True - - type: 'DELETE_REPLICA_TARGET_RESOURCES' - depends_on: ['DEPLOY_REPLICA_TARGET_RESOURCES', 
'REPLICATE_DISKS'] + type: 'DELETE_TRANSFER_TARGET_RESOURCES' + depends_on: ['DEPLOY_TRANSFER_TARGET_RESOURCES', 'REPLICATE_DISKS'] on_error: True - config: @@ -219,16 +219,16 @@ expected_tasks: - type: 'VALIDATE_SOURCE_MINION_POOL_COMPATIBILITY' - depends_on: ['GET_INSTANCE_INFO', 'VALIDATE_REPLICA_SOURCE_INPUTS'] + depends_on: ['GET_INSTANCE_INFO', 'VALIDATE_TRANSFER_SOURCE_INPUTS'] - type: 'VALIDATE_DESTINATION_MINION_POOL_COMPATIBILITY' - depends_on: ['VALIDATE_REPLICA_DESTINATION_INPUTS'] + depends_on: ['VALIDATE_TRANSFER_DESTINATION_INPUTS'] - - type: 'DEPLOY_REPLICA_DISKS' + type: 'DEPLOY_TRANSFER_DISKS' depends_on: ['VALIDATE_SOURCE_MINION_POOL_COMPATIBILITY', 'VALIDATE_DESTINATION_MINION_POOL_COMPATIBILITY'] - type: 'ATTACH_VOLUMES_TO_DESTINATION_MINION' - depends_on: ['DEPLOY_REPLICA_DISKS'] + depends_on: ['DEPLOY_TRANSFER_DISKS'] - type: 'SHUTDOWN_INSTANCE' depends_on: ['ATTACH_VOLUMES_TO_DESTINATION_MINION'] diff --git a/coriolis/tests/conductor/rpc/data/update_replica_config.yml b/coriolis/tests/conductor/rpc/data/update_transfer_config.yml similarity index 88% rename from coriolis/tests/conductor/rpc/data/update_replica_config.yml rename to coriolis/tests/conductor/rpc/data/update_transfer_config.yml index 74131f7f8..4c1731d39 100644 --- a/coriolis/tests/conductor/rpc/data/update_replica_config.yml +++ b/coriolis/tests/conductor/rpc/data/update_transfer_config.yml @@ -1,5 +1,5 @@ - config: - replica: + transfer: instances: ['mock_instance_1', 'mock_instance_2'] info: mock_instance_1: {} @@ -15,10 +15,10 @@ destination_environment: network_map: "mock_network_map" has_updated_values: True - has_replica_instance: True + has_transfer_instance: True - config: - replica: {} + transfer: {} updated_properties: origin_minion_pool_id: "mock_origin_minion_pool_id" destination_minion_pool_id: "mock_destination_minion_pool_id" @@ -29,10 +29,10 @@ destination_environment: network_map: "mock_network_map" has_updated_values: True - has_replica_instance: False + 
has_transfer_instance: False - config: - replica: {} + transfer: {} updated_properties: {} has_updated_values: False - has_replica_instance: False \ No newline at end of file + has_transfer_instance: False \ No newline at end of file diff --git a/coriolis/tests/conductor/rpc/test_client.py b/coriolis/tests/conductor/rpc/test_client.py index c03b8654d..91db7bfc4 100644 --- a/coriolis/tests/conductor/rpc/test_client.py +++ b/coriolis/tests/conductor/rpc/test_client.py @@ -7,8 +7,8 @@ from coriolis import constants from coriolis.tests import test_base - INSTANCE_ARGS = { + "transfer_scenario": "mock_transfer_scenario", "origin_endpoint_id": "mock_origin_endpoint_id", "destination_endpoint_id": "mock_destination_endpoint_id", "origin_minion_pool_id": "mock_origin_minion_pool_id", @@ -149,44 +149,44 @@ def test_get_provider_schemas(self): } self._test(self.client.get_provider_schemas, args) - def test_execute_replica_tasks(self): + def test_execute_transfer_tasks(self): args = { - "replica_id": "mock_replica_id", + "transfer_id": "mock_transfer_id", "shutdown_instances": False } - self._test(self.client.execute_replica_tasks, args) + self._test(self.client.execute_transfer_tasks, args) - def test_get_replica_tasks_executions(self): + def test_get_transfer_tasks_executions(self): args = { - "replica_id": "mock_replica_id", + "transfer_id": "mock_transfer_id", "include_tasks": False } - self._test(self.client.get_replica_tasks_executions, args) + self._test(self.client.get_transfer_tasks_executions, args) - def test_get_replica_tasks_execution(self): + def test_get_transfer_tasks_execution(self): args = { - "replica_id": "mock_replica_id", + "transfer_id": "mock_transfer_id", "execution_id": "mock_execution_id", "include_task_info": False } - self._test(self.client.get_replica_tasks_execution, args) + self._test(self.client.get_transfer_tasks_execution, args) - def test_delete_replica_tasks_execution(self): + def test_delete_transfer_tasks_execution(self): args = { - 
"replica_id": "mock_replica_id", + "transfer_id": "mock_transfer_id", "execution_id": "mock_execution_id" } - self._test(self.client.delete_replica_tasks_execution, args) + self._test(self.client.delete_transfer_tasks_execution, args) - def test_cancel_replica_tasks_execution(self): + def test_cancel_transfer_tasks_execution(self): args = { - "replica_id": "mock_replica_id", + "transfer_id": "mock_transfer_id", "execution_id": "mock_execution_id", "force": "mock_force" } - self._test(self.client.cancel_replica_tasks_execution, args) + self._test(self.client.cancel_transfer_tasks_execution, args) - def test_create_instances_replica(self): + def test_create_instances_transfer(self): args = { **INSTANCE_ARGS, } @@ -195,85 +195,44 @@ def test_create_instances_replica(self): "user_scripts": None } args.update(new_args) - self._test(self.client.create_instances_replica, args) + self._test(self.client.create_instances_transfer, args) - def test_get_replicas(self): + def test_get_transfers(self): args = { "include_tasks_executions": False, "include_task_info": False, } - self._test(self.client.get_replicas, args) + self._test(self.client.get_transfers, args) - def test_get_replica(self): + def test_get_transfer(self): args = { - "replica_id": "mock_replica_id", + "transfer_id": "mock_transfer_id", "include_task_info": False, } - self._test(self.client.get_replica, args) + self._test(self.client.get_transfer, args) - def test_delete_replica(self): + def test_delete_transfer(self): args = { - "replica_id": "mock_replica_id", + "transfer_id": "mock_transfer_id", } - self._test(self.client.delete_replica, args) + self._test(self.client.delete_transfer, args) - def test_delete_replica_disks(self): + def test_delete_transfer_disks(self): args = { - "replica_id": "mock_replica_id" + "transfer_id": "mock_transfer_id" } - self._test(self.client.delete_replica_disks, args) + self._test(self.client.delete_transfer_disks, args) - def test_get_migrations(self): + def 
test_deploy_transfer_instances(self): args = { - "include_tasks": False, - "include_task_info": False, - } - self._test(self.client.get_migrations, args) - - def test_get_migration(self): - args = { - "migration_id": "mock_migration_id", - "include_task_info": False, - } - self._test(self.client.get_migration, args) - - def test_migrate_instances(self): - args = { - **INSTANCE_ARGS, - "replication_count": 1, - "shutdown_instances": False, - "skip_os_morphing": False - } - new_args = { - "notes": None, - "user_scripts": None - } - args.update(new_args) - self._test(self.client.migrate_instances, args) - - def test_deploy_replica_instances(self): - args = { - "replica_id": "mock_replica_id", + "transfer_id": "mock_transfer_id", "instance_osmorphing_minion_pool_mappings": None, "clone_disks": False, "force": False, "skip_os_morphing": False, "user_scripts": None } - self._test(self.client.deploy_replica_instances, args) - - def test_delete_migration(self): - args = { - "migration_id": "mock_migration_id" - } - self._test(self.client.delete_migration, args) - - def test_cancel_migration(self): - args = { - "migration_id": "mock_migration_id", - "force": "mock_force" - } - self._test(self.client.cancel_migration, args) + self._test(self.client.deploy_transfer_instances, args) def test_set_task_host(self): args = { @@ -329,52 +288,52 @@ def test_update_task_progress_update(self): self._test(self.client.update_task_progress_update, args, rpc_op='_cast') - def test_create_replica_schedule(self): + def test_create_transfer_schedule(self): args = { - "replica_id": "mock_replica_id", + "transfer_id": "mock_transfer_id", "schedule": "mock_schedule", "enabled": "mock_enabled", "exp_date": "mock_exp_date", "shutdown_instance": "mock_shutdown_instance" } - self._test(self.client.create_replica_schedule, args) + self._test(self.client.create_transfer_schedule, args) - def test_update_replica_schedule(self): + def test_update_transfer_schedule(self): args = { - "replica_id": 
"mock_replica_id", + "transfer_id": "mock_transfer_id", "schedule_id": "mock_schedule_id", "updated_values": "mock_updated_values" } - self._test(self.client.update_replica_schedule, args) + self._test(self.client.update_transfer_schedule, args) - def test_delete_replica_schedule(self): + def test_delete_transfer_schedule(self): args = { - "replica_id": "mock_replica_id", + "transfer_id": "mock_transfer_id", "schedule_id": "mock_schedule_id" } - self._test(self.client.delete_replica_schedule, args) + self._test(self.client.delete_transfer_schedule, args) - def test_get_replica_schedules(self): + def test_get_transfer_schedules(self): args = { - "replica_id": None, + "transfer_id": None, "expired": True } - self._test(self.client.get_replica_schedules, args) + self._test(self.client.get_transfer_schedules, args) - def test_get_replica_schedule(self): + def test_get_transfer_schedule(self): args = { - "replica_id": "mock_replica_id", + "transfer_id": "mock_transfer_id", "schedule_id": "mock_schedule_id", "expired": True } - self._test(self.client.get_replica_schedule, args) + self._test(self.client.get_transfer_schedule, args) - def test_update_replica(self): + def test_update_transfer(self): args = { - "replica_id": "mock_replica_id", + "transfer_id": "mock_transfer_id", "updated_properties": "mock_updated_properties" } - self._test(self.client.update_replica, args) + self._test(self.client.update_transfer, args) def test_get_diagnostics(self): self._test(self.client.get_diagnostics, args={}) @@ -460,35 +419,36 @@ def test_delete_service(self): } self._test(self.client.delete_service, args) - def test_confirm_replica_minions_allocation(self): + def test_confirm_transfer_minions_allocation(self): args = { - "replica_id": "mock_replica_id", + "transfer_id": "mock_transfer_id", "minion_machine_allocations": "mock_minion_machine_allocations" } - self._test(self.client.confirm_replica_minions_allocation, args) + self._test(self.client.confirm_transfer_minions_allocation, 
args) - def test_report_replica_minions_allocation_error(self): + def test_report_transfer_minions_allocation_error(self): args = { - "replica_id": "mock_replica_id", + "transfer_id": "mock_transfer_id", "minion_allocation_error_details": "mock_minion_allocation_error_details" } - self._test(self.client.report_replica_minions_allocation_error, args) + self._test(self.client.report_transfer_minions_allocation_error, args) - def test_confirm_migration_minions_allocation(self): + def test_confirm_deployment_minions_allocation(self): args = { - "migration_id": "mock_migration_id", + "deployment_id": "mock_deployment_id", "minion_machine_allocations": "mock_minion_machine_allocations" } - self._test(self.client.confirm_migration_minions_allocation, args) + self._test(self.client.confirm_deployment_minions_allocation, args) - def test_report_migration_minions_allocation_error(self): + def test_report_deployment_minions_allocation_error(self): args = { - "migration_id": "mock_migration_id", + "deployment_id": "mock_deployment_id", "minion_allocation_error_details": "mock_minion_allocation_error_details" } - self._test(self.client.report_migration_minions_allocation_error, args) + self._test( + self.client.report_deployment_minions_allocation_error, args) def test_add_task_progress_update(self): args = { diff --git a/coriolis/tests/conductor/rpc/test_server.py b/coriolis/tests/conductor/rpc/test_server.py index 37ca27147..3018bb450 100644 --- a/coriolis/tests/conductor/rpc/test_server.py +++ b/coriolis/tests/conductor/rpc/test_server.py @@ -17,7 +17,6 @@ from coriolis.db.sqlalchemy import models from coriolis import exception from coriolis import keystone -from coriolis.licensing import client as licensing_client from coriolis import schemas from coriolis.tests import test_base from coriolis.tests import testutils @@ -136,136 +135,6 @@ def test_check_delete_reservation_for_transfer_delete_fails(self): transfer_action.reservation_id ) - def 
test_check_create_reservation_for_transfer(self): - transfer_action = mock.Mock() - transfer_action.instances = ['instance_1', 'instance_2'] - transfer_type = mock.sentinel.transfer_type - self._licensing_client.add_reservation.return_value = { - 'id': mock.sentinel.id - } - self.server._check_create_reservation_for_transfer( - transfer_action, transfer_type) - self._licensing_client.add_reservation.assert_called_once_with( - mock.sentinel.transfer_type, - 2 - ) - self.assertEqual( - transfer_action.reservation_id, - mock.sentinel.id - ) - - def test_check_create_reservation_for_transfer_no_licensing_client(self): - transfer_action = mock.Mock() - transfer_type = mock.sentinel.transfer_type - self.server._licensing_client = None - with self.assertLogs( - 'coriolis.conductor.rpc.server', level=logging.WARNING): - self.server._check_create_reservation_for_transfer( - transfer_action, transfer_type) - - def test_check_reservation_for_transfer(self): - transfer_action = mock.Mock() - transfer_action.reservation_id = mock.sentinel.reservation_id - reservation_type = mock.sentinel.reservation_type - self._licensing_client.check_refresh_reservation.return_value = { - 'id': mock.sentinel.reservation_id} - self.server._check_reservation_for_transfer( - transfer_action, reservation_type) - (self._licensing_client.check_refresh_reservation. 
- assert_called_once_with)( - mock.sentinel.reservation_id) - - def test_check_reservation_for_transfer_no_licensing_client( - self - ): - transfer_action = mock.Mock() - reservation_type = mock.sentinel.reservation_type - self.server._licensing_client = None - with self.assertLogs( - 'coriolis.conductor.rpc.server', level=logging.WARNING): - self.server._check_reservation_for_transfer( - transfer_action, reservation_type) - - @mock.patch.object(server.ConductorServerEndpoint, - '_check_create_reservation_for_transfer') - def test_check_reservation_for_transfer_no_reservation_id( - self, - mock_check_create_reservation_for_transfer - ): - transfer_action = mock.Mock() - transfer_action.reservation_id = None - reservation_type = mock.sentinel.reservation_type - self.server._check_reservation_for_transfer( - transfer_action, reservation_type) - self._licensing_client.check_refresh_reservation.assert_not_called() - mock_check_create_reservation_for_transfer.assert_called_once_with( - transfer_action, reservation_type - ) - - @mock.patch.object(server.ConductorServerEndpoint, - '_check_create_reservation_for_transfer') - def test_check_reservation_for_transfer_exc_code_404( - self, - mock_check_create_reservation_for_transfer - ): - transfer_action = mock.Mock() - transfer_action.reservation_id = mock.sentinel.reservation_id - reservation_type = mock.sentinel.reservation_type - ex = CoriolisTestException() - ex.code = 404 - self._licensing_client.check_refresh_reservation.side_effect = ex - self.server._check_reservation_for_transfer( - transfer_action, reservation_type) - (self._licensing_client.check_refresh_reservation - .assert_called_once_with)( - mock.sentinel.reservation_id) - mock_check_create_reservation_for_transfer.assert_called_once_with( - transfer_action, reservation_type - ) - - @mock.patch.object(server.ConductorServerEndpoint, - '_check_create_reservation_for_transfer') - def test_check_reservation_for_transfer_exc_code_409( - self, - 
mock_check_create_reservation_for_transfer - ): - transfer_action = mock.Mock() - transfer_action.reservation_id = mock.sentinel.reservation_id - reservation_type = mock.sentinel.reservation_type - ex = CoriolisTestException() - ex.code = 409 - self._licensing_client.check_refresh_reservation.side_effect = ex - self.server._check_reservation_for_transfer( - transfer_action, reservation_type) - (self._licensing_client.check_refresh_reservation - .assert_called_once_with)( - mock.sentinel.reservation_id) - mock_check_create_reservation_for_transfer.assert_called_once_with( - transfer_action, reservation_type - ) - - @mock.patch.object(server.ConductorServerEndpoint, - '_check_create_reservation_for_transfer') - def test_check_reservation_for_transfer_exc_code_not_excepted( - self, - mock_check_create_reservation_for_transfer - ): - transfer_action = mock.Mock() - transfer_action.reservation_id = mock.sentinel.reservation_id - reservation_type = mock.sentinel.reservation_type - ex = CoriolisTestException() - self._licensing_client.check_refresh_reservation.side_effect = ex - self.assertRaises( - CoriolisTestException, - self.server._check_reservation_for_transfer, - transfer_action, - reservation_type - ) - (self._licensing_client.check_refresh_reservation - .assert_called_once_with)( - mock.sentinel.reservation_id) - mock_check_create_reservation_for_transfer.assert_not_called() - @mock.patch.object(server.ConductorServerEndpoint, "get_endpoint") @mock.patch.object(db_api, "delete_endpoint") @mock.patch.object(db_api, "update_endpoint") @@ -400,9 +269,9 @@ def call_get_endpoint(): self.assertRaises(exception.NotFound, call_get_endpoint) @mock.patch.object(db_api, "delete_endpoint") - @mock.patch.object(db_api, "get_endpoint_replicas_count") + @mock.patch.object(db_api, "get_endpoint_transfers_count") def test_delete_endpoint( - self, mock_get_endpoint_replicas_count, mock_delete_endpoint + self, mock_get_endpoint_transfers_count, mock_delete_endpoint ): def 
call_delete_endpoint(): return testutils.get_wrapped_function(self.server.delete_endpoint)( @@ -410,14 +279,14 @@ def call_delete_endpoint(): mock.sentinel.endpoint_id # type: ignore ) - mock_get_endpoint_replicas_count.return_value = 0 + mock_get_endpoint_transfers_count.return_value = 0 call_delete_endpoint() mock_delete_endpoint.assert_called_once_with( mock.sentinel.context, mock.sentinel.endpoint_id ) - # endpoint has replicas - mock_get_endpoint_replicas_count.return_value = 1 + # endpoint has transfers + mock_get_endpoint_transfers_count.return_value = 1 self.assertRaises(exception.NotAuthorized, call_delete_endpoint) @mock.patch.object( @@ -1215,35 +1084,35 @@ def test_begin_tasks_no_newly_started_tasks( @mock.patch.object(server.ConductorServerEndpoint, "_create_task") @mock.patch.object( - server.ConductorServerEndpoint, "_check_replica_running_executions" + server.ConductorServerEndpoint, "_check_transfer_running_executions" ) - @mock.patch.object(server.ConductorServerEndpoint, "_get_replica") - def test_delete_replica_disks_invalid_state( - self, mock_get_replica, - mock_check_replica_running, mock_create_task + @mock.patch.object(server.ConductorServerEndpoint, "_get_transfer") + def test_delete_transfer_disks_invalid_state( + self, mock_get_transfer, + mock_check_transfer_running, mock_create_task ): - mock_replica = mock_get_replica.return_value - mock_replica.instances = [mock.sentinel.instance] - mock_replica.info = {} - delete_replica_disks = testutils.get_wrapped_function( - self.server.delete_replica_disks + mock_transfer = mock_get_transfer.return_value + mock_transfer.instances = [mock.sentinel.instance] + mock_transfer.info = {} + delete_transfer_disks = testutils.get_wrapped_function( + self.server.delete_transfer_disks ) self.assertRaises( - exception.InvalidReplicaState, - delete_replica_disks, + exception.InvalidTransferState, + delete_transfer_disks, self.server, mock.sentinel.context, - mock.sentinel.replica_id, + 
mock.sentinel.transfer_id, ) - mock_get_replica.assert_called_once_with( + mock_get_transfer.assert_called_once_with( mock.sentinel.context, - mock.sentinel.replica_id, + mock.sentinel.transfer_id, include_task_info=True, ) - mock_check_replica_running.assert_called_once_with( - mock.sentinel.context, mock_replica + mock_check_transfer_running.assert_called_once_with( + mock.sentinel.context, mock_transfer ) mock_create_task.assert_not_called() @@ -1264,7 +1133,7 @@ def convert_to_task(task_config): instance_task.depends_on = task_config.get("depends_on", None) instance_task.task_type = task_config.get( "task_type", - constants.TASK_TYPE_DEPLOY_MIGRATION_SOURCE_RESOURCES, + constants.TASK_TYPE_DEPLOY_TRANSFER_SOURCE_RESOURCES, ) return instance_task @@ -1307,7 +1176,7 @@ def convert_to_task(task_config): @mock.patch.object(copy, "deepcopy") @mock.patch.object( server.ConductorServerEndpoint, - "get_replica_tasks_execution" + "get_transfer_tasks_execution" ) @mock.patch.object( server.ConductorServerEndpoint, @@ -1321,7 +1190,7 @@ def convert_to_task(task_config): server.ConductorServerEndpoint, "_minion_manager_client" ) - @mock.patch.object(db_api, "add_replica_tasks_execution") + @mock.patch.object(db_api, "add_transfer_tasks_execution") @mock.patch.object(db_api, "update_transfer_action_info_for_instance") @mock.patch.object( server.ConductorServerEndpoint, @@ -1339,7 +1208,7 @@ def convert_to_task(task_config): ) @mock.patch.object( server.ConductorServerEndpoint, - "_check_replica_running_executions" + "_check_transfer_running_executions" ) @mock.patch.object( server.ConductorServerEndpoint, @@ -1347,26 +1216,26 @@ def convert_to_task(task_config): ) @mock.patch.object( server.ConductorServerEndpoint, - "_get_replica" + "_get_transfer" ) - @ddt.file_data("data/execute_replica_tasks_config.yml") + @ddt.file_data("data/execute_transfer_tasks_config.yml") @ddt.unpack - def test_execute_replica_tasks( + def test_execute_transfer_tasks( self, - mock_get_replica, + 
mock_get_transfer, mock_check_reservation, - mock_check_replica_running_executions, + mock_check_transfer_running_executions, mock_check_minion_pools_for_action, mock_tasks_execution, mock_uuid4, mock_create_task, mock_check_execution_tasks_sanity, mock_update_transfer_action_info_for_instance, - mock_add_replica_tasks_execution, + mock_add_transfer_tasks_execution, mock_minion_manager_client, mock_set_tasks_execution_status, mock_begin_tasks, - mock_get_replica_tasks_execution, + mock_get_transfer_tasks_execution, mock_deepcopy, config, expected_tasks, @@ -1375,17 +1244,17 @@ def test_execute_replica_tasks( has_target_minion_pool = config.get("target_minion_pool", False) shutdown_instances = config.get("shutdown_instances", False) - def call_execute_replica_tasks(): + def call_execute_transfer_tasks(): return testutils\ - .get_wrapped_function(self.server.execute_replica_tasks)( + .get_wrapped_function(self.server.execute_transfer_tasks)( self.server, mock.sentinel.context, - mock.sentinel.replica_id, + mock.sentinel.transfer_id, shutdown_instances, # type: ignore ) instances = [mock.sentinel.instance1, mock.sentinel.instance2] - mock_replica = mock.Mock( + mock_transfer = mock.Mock( instances=instances, network_map=mock.sentinel.network_map, info={mock.sentinel.instance1: {'volume_info': None}}, @@ -1394,7 +1263,7 @@ def call_execute_replica_tasks(): destination_minion_pool_id=mock.sentinel.destination_minion_pool_id if has_target_minion_pool else None, ) - mock_get_replica.return_value = mock_replica + mock_get_transfer.return_value = mock_transfer def create_task_side_effect( instance, @@ -1416,40 +1285,37 @@ def create_task_side_effect( mock_create_task.side_effect = create_task_side_effect - result = call_execute_replica_tasks() - mock_get_replica.assert_called_once_with( + result = call_execute_transfer_tasks() + mock_get_transfer.assert_called_once_with( mock.sentinel.context, - mock.sentinel.replica_id, + mock.sentinel.transfer_id, include_task_info=True, 
) - mock_check_reservation.assert_called_once_with( - mock_replica, - licensing_client.RESERVATION_TYPE_REPLICA - ) - mock_check_replica_running_executions.assert_called_once_with( - mock.sentinel.context, mock_replica) + mock_check_reservation.assert_called_once_with(mock_transfer) + mock_check_transfer_running_executions.assert_called_once_with( + mock.sentinel.context, mock_transfer) mock_check_minion_pools_for_action.assert_called_once_with( - mock.sentinel.context, mock_replica) + mock.sentinel.context, mock_transfer) mock_deepcopy.assert_called_once_with( - mock_replica.destination_environment) + mock_transfer.destination_environment) for instance in instances: - assert instance in mock_replica.info + assert instance in mock_transfer.info self.assertEqual( - mock_replica.info[instance]['source_environment'], - mock_replica.source_environment) + mock_transfer.info[instance]['source_environment'], + mock_transfer.source_environment) self.assertEqual( - mock_replica.info[instance]['target_environment'], + mock_transfer.info[instance]['target_environment'], mock_deepcopy.return_value) # generic tasks mock_create_task.assert_has_calls([ mock.call( instance, - constants.TASK_TYPE_VALIDATE_REPLICA_SOURCE_INPUTS, + constants.TASK_TYPE_VALIDATE_TRANSFER_SOURCE_INPUTS, mock_tasks_execution.return_value), mock.call( instance, @@ -1457,7 +1323,7 @@ def create_task_side_effect( mock_tasks_execution.return_value), mock.call( instance, - constants.TASK_TYPE_VALIDATE_REPLICA_DESTINATION_INPUTS, + constants.TASK_TYPE_VALIDATE_TRANSFER_DESTINATION_INPUTS, mock_tasks_execution.return_value, depends_on=[constants.TASK_TYPE_GET_INSTANCE_INFO]), ]) @@ -1480,25 +1346,25 @@ def create_task_side_effect( mock_update_transfer_action_info_for_instance.assert_has_calls([ mock.call( mock.sentinel.context, - mock_replica.id, + mock_transfer.id, instance, - mock_replica.info[instance], + mock_transfer.info[instance], ) ]) mock_check_execution_tasks_sanity.assert_called_once_with( 
mock_tasks_execution.return_value, - mock_replica.info) + mock_transfer.info) - mock_add_replica_tasks_execution.assert_called_once_with( + mock_add_transfer_tasks_execution.assert_called_once_with( mock.sentinel.context, mock_tasks_execution.return_value) if any([has_origin_minion_pool, has_target_minion_pool]): mock_minion_manager_client\ - .allocate_minion_machines_for_replica.assert_called_once_with( + .allocate_minion_machines_for_transfer.assert_called_once_with( mock.sentinel.context, - mock_replica, + mock_transfer, ) mock_set_tasks_execution_status.assert_called_once_with( mock.sentinel.context, @@ -1508,13 +1374,13 @@ def create_task_side_effect( else: mock_begin_tasks.assert_called_once_with( mock.sentinel.context, - mock_replica, + mock_transfer, mock_tasks_execution.return_value, ) - mock_get_replica_tasks_execution.assert_called_once_with( + mock_get_transfer_tasks_execution.assert_called_once_with( mock.sentinel.context, - mock.sentinel.replica_id, + mock.sentinel.transfer_id, mock_tasks_execution.return_value.id) self.assertEqual( @@ -1522,57 +1388,57 @@ def create_task_side_effect( constants.EXECUTION_STATUS_UNEXECUTED) self.assertEqual( mock_tasks_execution.return_value.type, - constants.EXECUTION_TYPE_REPLICA_EXECUTION) + constants.EXECUTION_TYPE_TRANSFER_EXECUTION) self.assertEqual( - result, mock_get_replica_tasks_execution.return_value) + result, mock_get_transfer_tasks_execution.return_value) - @mock.patch.object(db_api, "get_replica_tasks_executions") - def test_get_replica_tasks_executions( + @mock.patch.object(db_api, "get_transfer_tasks_executions") + def test_get_transfer_tasks_executions( self, - mock_get_replica_tasks_executions + mock_get_transfer_tasks_executions ): result = testutils.get_wrapped_function( - self.server.get_replica_tasks_executions)( + self.server.get_transfer_tasks_executions)( self.server, mock.sentinel.context, - mock.sentinel.replica_id, + mock.sentinel.transfer_id, mock.sentinel.execution_id, 
include_task_info=False ) self.assertEqual( - mock_get_replica_tasks_executions.return_value, + mock_get_transfer_tasks_executions.return_value, result ) - mock_get_replica_tasks_executions.assert_called_once_with( + mock_get_transfer_tasks_executions.assert_called_once_with( mock.sentinel.context, - mock.sentinel.replica_id, + mock.sentinel.transfer_id, mock.sentinel.execution_id, include_task_info=False, to_dict=True ) - @mock.patch.object(db_api, "get_replica_tasks_execution") - def test_get_replica_tasks_execution( + @mock.patch.object(db_api, "get_transfer_tasks_execution") + def test_get_transfer_tasks_execution( self, - mock_get_replica_tasks_execution + mock_get_transfer_tasks_execution ): result = testutils.get_wrapped_function( - self.server.get_replica_tasks_execution)( + self.server.get_transfer_tasks_execution)( self.server, mock.sentinel.context, - mock.sentinel.replica_id, + mock.sentinel.transfer_id, mock.sentinel.execution_id, include_task_info=False ) self.assertEqual( - mock_get_replica_tasks_execution.return_value, + mock_get_transfer_tasks_execution.return_value, result ) - mock_get_replica_tasks_execution.assert_called_once_with( + mock_get_transfer_tasks_execution.assert_called_once_with( mock.sentinel.context, - mock.sentinel.replica_id, + mock.sentinel.transfer_id, mock.sentinel.execution_id, include_task_info=False, to_dict=True @@ -1580,261 +1446,261 @@ def test_get_replica_tasks_execution( @mock.patch.object( server.ConductorServerEndpoint, - '_get_replica_tasks_execution' + '_get_transfer_tasks_execution' ) - @mock.patch.object(db_api, 'delete_replica_tasks_execution') - def test_delete_replica_tasks_execution( + @mock.patch.object(db_api, 'delete_transfer_tasks_execution') + def test_delete_transfer_tasks_execution( self, - mock_delete_replica_tasks_execution, - mock_get_replica_tasks_execution + mock_delete_transfer_tasks_execution, + mock_get_transfer_tasks_execution ): - def call_delete_replica_tasks_execution(): + def 
call_delete_transfer_tasks_execution(): return testutils.get_wrapped_function( - self.server.delete_replica_tasks_execution)( + self.server.delete_transfer_tasks_execution)( self.server, mock.sentinel.context, - mock.sentinel.replica_id, + mock.sentinel.transfer_id, mock.sentinel.execution_id, # type: ignore ) - call_delete_replica_tasks_execution() - mock_get_replica_tasks_execution.assert_called_once_with( + call_delete_transfer_tasks_execution() + mock_get_transfer_tasks_execution.assert_called_once_with( mock.sentinel.context, - mock.sentinel.replica_id, + mock.sentinel.transfer_id, mock.sentinel.execution_id) - mock_delete_replica_tasks_execution.assert_called_once_with( + mock_delete_transfer_tasks_execution.assert_called_once_with( mock.sentinel.context, mock.sentinel.execution_id) # raises exception if status is active - mock_get_replica_tasks_execution.return_value.status = constants\ - .EXECUTION_STATUS_RUNNING + mock_get_transfer_tasks_execution.return_value.status = ( + constants.EXECUTION_STATUS_RUNNING) self.assertRaises( - exception.InvalidMigrationState, - call_delete_replica_tasks_execution) + exception.InvalidActionTasksExecutionState, + call_delete_transfer_tasks_execution) @mock.patch.object(server.ConductorServerEndpoint, - '_get_replica_tasks_execution') + '_get_transfer_tasks_execution') @mock.patch.object(server.ConductorServerEndpoint, '_cancel_tasks_execution') - def test_cancel_replica_tasks_execution( + def test_cancel_transfer_tasks_execution( self, - mock_cancel_replica_tasks_execution, - mock_get_replica_tasks_execution + mock_cancel_transfer_tasks_execution, + mock_get_transfer_tasks_execution ): - mock_get_replica_tasks_execution.return_value.status = constants\ + mock_get_transfer_tasks_execution.return_value.status = constants\ .EXECUTION_STATUS_RUNNING testutils.get_wrapped_function( - self.server.cancel_replica_tasks_execution)( + self.server.cancel_transfer_tasks_execution)( self.server, mock.sentinel.context, - 
mock.sentinel.replica_id, + mock.sentinel.transfer_id, mock.sentinel.execution_id, False ) - mock_get_replica_tasks_execution.assert_called_once_with( + mock_get_transfer_tasks_execution.assert_called_once_with( mock.sentinel.context, - mock.sentinel.replica_id, + mock.sentinel.transfer_id, mock.sentinel.execution_id) - mock_cancel_replica_tasks_execution.assert_called_once_with( + mock_cancel_transfer_tasks_execution.assert_called_once_with( mock.sentinel.context, - mock_get_replica_tasks_execution.return_value, + mock_get_transfer_tasks_execution.return_value, force=False) - mock_get_replica_tasks_execution.reset_mock() - mock_cancel_replica_tasks_execution.reset_mock() - mock_get_replica_tasks_execution.return_value.status = constants\ + mock_get_transfer_tasks_execution.reset_mock() + mock_cancel_transfer_tasks_execution.reset_mock() + mock_get_transfer_tasks_execution.return_value.status = constants\ .EXECUTION_STATUS_CANCELLING testutils.get_wrapped_function( - self.server.cancel_replica_tasks_execution)( + self.server.cancel_transfer_tasks_execution)( self.server, mock.sentinel.context, - mock.sentinel.replica_id, + mock.sentinel.transfer_id, mock.sentinel.execution_id, True ) - mock_get_replica_tasks_execution.assert_called_once_with( + mock_get_transfer_tasks_execution.assert_called_once_with( mock.sentinel.context, - mock.sentinel.replica_id, + mock.sentinel.transfer_id, mock.sentinel.execution_id) - mock_cancel_replica_tasks_execution.assert_called_once_with( + mock_cancel_transfer_tasks_execution.assert_called_once_with( mock.sentinel.context, - mock_get_replica_tasks_execution.return_value, + mock_get_transfer_tasks_execution.return_value, force=True) @mock.patch.object(server.ConductorServerEndpoint, - '_get_replica_tasks_execution') + '_get_transfer_tasks_execution') @mock.patch.object(server.ConductorServerEndpoint, '_cancel_tasks_execution') - def test_cancel_replica_tasks_execution_status_not_active( + def 
test_cancel_transfer_tasks_execution_status_not_active( self, - mock_cancel_replica_tasks_execution, - mock_get_replica_tasks_execution + mock_cancel_transfer_tasks_execution, + mock_get_transfer_tasks_execution ): self.assertRaises( - exception.InvalidReplicaState, + exception.InvalidTransferState, testutils.get_wrapped_function( - self.server.cancel_replica_tasks_execution), + self.server.cancel_transfer_tasks_execution), self.server, mock.sentinel.context, - mock.sentinel.replica_id, + mock.sentinel.transfer_id, mock.sentinel.execution_id, False ) - mock_get_replica_tasks_execution.assert_called_once_with( + mock_get_transfer_tasks_execution.assert_called_once_with( mock.sentinel.context, - mock.sentinel.replica_id, + mock.sentinel.transfer_id, mock.sentinel.execution_id) - mock_cancel_replica_tasks_execution.assert_not_called() + mock_cancel_transfer_tasks_execution.assert_not_called() @mock.patch.object(server.ConductorServerEndpoint, - '_get_replica_tasks_execution') + '_get_transfer_tasks_execution') @mock.patch.object(server.ConductorServerEndpoint, '_cancel_tasks_execution') - def test_cancel_replica_tasks_execution_status_cancelling_no_force( + def test_cancel_transfer_tasks_execution_status_cancelling_no_force( self, - mock_cancel_replica_tasks_execution, - mock_get_replica_tasks_execution + mock_cancel_transfer_tasks_execution, + mock_get_transfer_tasks_execution ): - mock_get_replica_tasks_execution.return_value.status = constants\ + mock_get_transfer_tasks_execution.return_value.status = constants\ .EXECUTION_STATUS_CANCELLING self.assertRaises( - exception.InvalidReplicaState, + exception.InvalidTransferState, testutils.get_wrapped_function( - self.server.cancel_replica_tasks_execution), + self.server.cancel_transfer_tasks_execution), self.server, mock.sentinel.context, - mock.sentinel.replica_id, + mock.sentinel.transfer_id, mock.sentinel.execution_id, False ) - mock_get_replica_tasks_execution.assert_called_once_with( + 
mock_get_transfer_tasks_execution.assert_called_once_with( mock.sentinel.context, - mock.sentinel.replica_id, + mock.sentinel.transfer_id, mock.sentinel.execution_id) - mock_cancel_replica_tasks_execution.assert_not_called() + mock_cancel_transfer_tasks_execution.assert_not_called() - @mock.patch.object(db_api, 'get_replica_tasks_execution') - def test__get_replica_tasks_execution( + @mock.patch.object(db_api, 'get_transfer_tasks_execution') + def test__get_transfer_tasks_execution( self, - mock_get_replica_tasks_execution + mock_get_transfer_tasks_execution ): - result = self.server._get_replica_tasks_execution( + result = self.server._get_transfer_tasks_execution( mock.sentinel.context, - mock.sentinel.replica_id, + mock.sentinel.transfer_id, mock.sentinel.execution_id, include_task_info=False, to_dict=False, ) self.assertEqual( - mock_get_replica_tasks_execution.return_value, + mock_get_transfer_tasks_execution.return_value, result ) - mock_get_replica_tasks_execution.assert_called_once_with( + mock_get_transfer_tasks_execution.assert_called_once_with( mock.sentinel.context, - mock.sentinel.replica_id, + mock.sentinel.transfer_id, mock.sentinel.execution_id, include_task_info=False, to_dict=False) - @mock.patch.object(db_api, 'get_replica_tasks_execution') - def test__get_replica_tasks_execution_no_execution( + @mock.patch.object(db_api, 'get_transfer_tasks_execution') + def test__get_transfer_tasks_execution_no_execution( self, - mock_get_replica_tasks_execution + mock_get_transfer_tasks_execution ): - mock_get_replica_tasks_execution.return_value = None + mock_get_transfer_tasks_execution.return_value = None self.assertRaises( exception.NotFound, - self.server._get_replica_tasks_execution, + self.server._get_transfer_tasks_execution, mock.sentinel.context, - mock.sentinel.replica_id, + mock.sentinel.transfer_id, mock.sentinel.execution_id, include_task_info=False, to_dict=False, ) - mock_get_replica_tasks_execution.assert_called_once_with( + 
mock_get_transfer_tasks_execution.assert_called_once_with( mock.sentinel.context, - mock.sentinel.replica_id, + mock.sentinel.transfer_id, mock.sentinel.execution_id, include_task_info=False, to_dict=False) - @mock.patch.object(db_api, 'get_replicas') - def test_get_replicas(self, mock_get_replicas): - result = self.server.get_replicas( + @mock.patch.object(db_api, 'get_transfers') + def test_get_transfers(self, mock_get_transfers): + result = self.server.get_transfers( mock.sentinel.context, include_tasks_executions=False, include_task_info=False ) self.assertEqual( - mock_get_replicas.return_value, + mock_get_transfers.return_value, result ) - mock_get_replicas.assert_called_once_with( + mock_get_transfers.assert_called_once_with( mock.sentinel.context, False, include_task_info=False, to_dict=True ) - @mock.patch.object(server.ConductorServerEndpoint, '_get_replica') - def test_get_replica(self, mock_get_replica): - result = testutils.get_wrapped_function(self.server.get_replica)( + @mock.patch.object(server.ConductorServerEndpoint, '_get_transfer') + def test_get_transfer(self, mock_get_transfer): + result = testutils.get_wrapped_function(self.server.get_transfer)( self.server, mock.sentinel.context, - mock.sentinel.replica_id, + mock.sentinel.transfer_id, include_task_info=False ) self.assertEqual( - mock_get_replica.return_value, + mock_get_transfer.return_value, result ) - mock_get_replica.assert_called_once_with( + mock_get_transfer.assert_called_once_with( mock.sentinel.context, - mock.sentinel.replica_id, + mock.sentinel.transfer_id, include_task_info=False, to_dict=True ) - @mock.patch.object(db_api, 'delete_replica') + @mock.patch.object(db_api, 'delete_transfer') @mock.patch.object(server.ConductorServerEndpoint, '_check_delete_reservation_for_transfer') @mock.patch.object(server.ConductorServerEndpoint, - '_check_replica_running_executions') - @mock.patch.object(server.ConductorServerEndpoint, '_get_replica') - def test_delete_replica( + 
'_check_transfer_running_executions') + @mock.patch.object(server.ConductorServerEndpoint, '_get_transfer') + def test_delete_transfer( self, - mock_get_replica, - mock_check_replica_running_executions, + mock_get_transfer, + mock_check_transfer_running_executions, mock_check_delete_reservation_for_transfer, - mock_delete_replica, + mock_delete_transfer, ): - testutils.get_wrapped_function(self.server.delete_replica)( + testutils.get_wrapped_function(self.server.delete_transfer)( self.server, mock.sentinel.context, - mock.sentinel.replica_id + mock.sentinel.transfer_id ) - mock_get_replica.assert_called_once_with( - mock.sentinel.context, mock.sentinel.replica_id) - mock_check_replica_running_executions.assert_called_once_with( - mock.sentinel.context, mock_get_replica.return_value) + mock_get_transfer.assert_called_once_with( + mock.sentinel.context, mock.sentinel.transfer_id) + mock_check_transfer_running_executions.assert_called_once_with( + mock.sentinel.context, mock_get_transfer.return_value) mock_check_delete_reservation_for_transfer.assert_called_once_with( - mock_get_replica.return_value) - mock_delete_replica.assert_called_once_with( - mock.sentinel.context, mock.sentinel.replica_id) + mock_get_transfer.return_value) + mock_delete_transfer.assert_called_once_with( + mock.sentinel.context, mock.sentinel.transfer_id) @mock.patch.object( server.ConductorServerEndpoint, - 'get_replica_tasks_execution' + 'get_transfer_tasks_execution' ) @mock.patch.object( server.ConductorServerEndpoint, '_begin_tasks' ) - @mock.patch.object(db_api, "add_replica_tasks_execution") + @mock.patch.object(db_api, "add_transfer_tasks_execution") @mock.patch.object(db_api, "update_transfer_action_info_for_instance") @mock.patch.object( server.ConductorServerEndpoint, @@ -1849,37 +1715,37 @@ def test_delete_replica( @mock.patch.object(models, "TasksExecution") @mock.patch.object( server.ConductorServerEndpoint, - '_check_replica_running_executions' + 
'_check_transfer_running_executions' ) @mock.patch.object( server.ConductorServerEndpoint, - '_get_replica' + '_get_transfer' ) - def test_delete_replica_disks( + def test_delete_transfer_disks( self, - mock_get_replica, - mock_check_replica_running_executions, + mock_get_transfer, + mock_check_transfer_running_executions, mock_tasks_execution, mock_uuid4, mock_create_task, mock_deepcopy, mock_check_execution_tasks_sanity, mock_update_transfer_action_info_for_instance, - mock_add_replica_tasks_execution, + mock_add_transfer_tasks_execution, mock_begin_tasks, - mock_get_replica_tasks_execution, + mock_get_transfer_tasks_execution, ): - def call_delete_replica_disks(): + def call_delete_transfer_disks(): return testutils.get_wrapped_function( - self.server.delete_replica_disks)( + self.server.delete_transfer_disks)( self.server, mock.sentinel.context, - mock.sentinel.replica_id, # type: ignore + mock.sentinel.transfer_id, # type: ignore ) instances = [mock.Mock(), mock.Mock()] - mock_replica = mock.Mock( + mock_transfer = mock.Mock( instances=instances, - id=mock.sentinel.replica_id, + id=mock.sentinel.transfer_id, network_map=mock.sentinel.network_map, info={ instance: instance @@ -1903,17 +1769,17 @@ def create_task_side_effect( mock_create_task.side_effect = create_task_side_effect - mock_get_replica.return_value = mock_replica - result = call_delete_replica_disks() + mock_get_transfer.return_value = mock_transfer + result = call_delete_transfer_disks() - mock_get_replica.assert_called_once_with( + mock_get_transfer.assert_called_once_with( mock.sentinel.context, - mock.sentinel.replica_id, + mock.sentinel.transfer_id, include_task_info=True ) - mock_check_replica_running_executions.assert_called_once_with( + mock_check_transfer_running_executions.assert_called_once_with( mock.sentinel.context, - mock_replica + mock_transfer ) self.assertEqual( @@ -1922,25 +1788,25 @@ def create_task_side_effect( ) self.assertEqual( mock_tasks_execution.return_value.type, - 
constants.EXECUTION_TYPE_REPLICA_DISKS_DELETE + constants.EXECUTION_TYPE_TRANSFER_DISKS_DELETE ) for instance in instances: - assert instance in mock_replica.info + assert instance in mock_transfer.info mock_create_task.assert_has_calls([ mock.call( instance, - constants.TASK_TYPE_DELETE_REPLICA_SOURCE_DISK_SNAPSHOTS, + constants.TASK_TYPE_DELETE_TRANSFER_SOURCE_DISK_SNAPSHOTS, mock_tasks_execution.return_value, ), mock.call( instance, - constants.TASK_TYPE_DELETE_REPLICA_DISKS, + constants.TASK_TYPE_DELETE_TRANSFER_DISKS, mock_tasks_execution.return_value, depends_on=[ constants - .TASK_TYPE_DELETE_REPLICA_SOURCE_DISK_SNAPSHOTS + .TASK_TYPE_DELETE_TRANSFER_SOURCE_DISK_SNAPSHOTS ], ), ]) @@ -1948,52 +1814,52 @@ def create_task_side_effect( mock_update_transfer_action_info_for_instance\ .assert_has_calls([mock.call( mock.sentinel.context, - mock_replica.id, + mock_transfer.id, instance, - mock_replica.info[instance], + mock_transfer.info[instance], )]) mock_deepcopy.assert_called_once_with( - mock_replica.destination_environment) + mock_transfer.destination_environment) mock_check_execution_tasks_sanity.assert_called_once_with( mock_tasks_execution.return_value, - mock_replica.info, + mock_transfer.info, ) - mock_add_replica_tasks_execution.assert_called_once_with( + mock_add_transfer_tasks_execution.assert_called_once_with( mock.sentinel.context, mock_tasks_execution.return_value ) mock_begin_tasks.assert_called_once_with( mock.sentinel.context, - mock_replica, + mock_transfer, mock_tasks_execution.return_value ) - mock_get_replica_tasks_execution.assert_called_once_with( + mock_get_transfer_tasks_execution.assert_called_once_with( mock.sentinel.context, - mock_replica.id, + mock_transfer.id, mock_tasks_execution.return_value.id ) self.assertEqual( - result, mock_get_replica_tasks_execution.return_value) + result, mock_get_transfer_tasks_execution.return_value) # raises exception if instances have no volumes info instances[0].get.return_value = None 
instances[1].get.return_value = None self.assertRaises( - exception.InvalidReplicaState, - call_delete_replica_disks + exception.InvalidTransferState, + call_delete_transfer_disks ) - # raises exception if instance not in replica.info + # raises exception if instance not in transfer.info instances[0].get.return_value = mock.sentinel.volume_info instances[1].get.return_value = mock.sentinel.volume_info - mock_replica.info = {} + mock_transfer.info = {} self.assertRaises( - exception.InvalidReplicaState, - call_delete_replica_disks + exception.InvalidTransferState, + call_delete_transfer_disks ) def test_check_endpoints(self): @@ -2030,30 +1896,31 @@ def test_check_endpoints_same_destination_connection_info(self): destination_endpoint ) - @mock.patch.object(server.ConductorServerEndpoint, 'get_replica') - @mock.patch.object(db_api, 'add_replica') + @mock.patch.object(server.ConductorServerEndpoint, 'get_transfer') + @mock.patch.object(db_api, 'add_transfer') @mock.patch.object(server.ConductorServerEndpoint, - '_check_create_reservation_for_transfer') + '_create_reservation_for_transfer') @mock.patch.object(server.ConductorServerEndpoint, '_check_minion_pools_for_action') - @mock.patch.object(models, 'Replica') + @mock.patch.object(models, 'Transfer') @mock.patch.object(server.ConductorServerEndpoint, '_check_endpoints') @mock.patch.object(server.ConductorServerEndpoint, 'get_endpoint') - def test_create_instances_replica( + def test_create_instances_transfer( self, mock_get_endpoint, mock_check_endpoints, - mock_replica, + mock_transfer, mock_check_minion_pools_for_action, - mock_check_create_reservation_for_transfer, - mock_add_replica, - mock_get_replica + mock_create_reservation_for_transfer, + mock_add_transfer, + mock_get_transfer ): mock_get_endpoint.side_effect = mock.sentinel.origin_endpoint_id, \ mock.sentinel.destination_endpoint_id - mock_replica.return_value = mock.Mock() - result = self.server.create_instances_replica( + mock_transfer.return_value = 
mock.Mock() + result = self.server.create_instances_transfer( mock.sentinel.context, + constants.TRANSFER_SCENARIO_REPLICA, mock.sentinel.origin_endpoint_id, mock.sentinel.destination_endpoint_id, mock.sentinel.origin_minion_pool_id, @@ -2068,7 +1935,7 @@ def test_create_instances_replica( user_scripts=None ) self.assertEqual( - mock_get_replica.return_value, + mock_get_transfer.return_value, result ) mock_get_endpoint.assert_has_calls([ @@ -2082,18 +1949,18 @@ def test_create_instances_replica( ) self.assertEqual( ( - mock_replica.return_value.origin_endpoint_id, - mock_replica.return_value.destination_endpoint_id, - mock_replica.return_value.destination_endpoint_id, - mock_replica.return_value.origin_minion_pool_id, - mock_replica.return_value.destination_minion_pool_id, - (mock_replica.return_value. - instance_osmorphing_minion_pool_mappings), - mock_replica.return_value.source_environment, - mock_replica.return_value.destination_environment, - mock_replica.return_value.info, - mock_replica.return_value.notes, - mock_replica.return_value.user_scripts), + mock_transfer.return_value.origin_endpoint_id, + mock_transfer.return_value.destination_endpoint_id, + mock_transfer.return_value.destination_endpoint_id, + mock_transfer.return_value.origin_minion_pool_id, + mock_transfer.return_value.destination_minion_pool_id, + (mock_transfer.return_value. 
+ instance_osmorphing_minion_pool_mappings), + mock_transfer.return_value.source_environment, + mock_transfer.return_value.destination_environment, + mock_transfer.return_value.info, + mock_transfer.return_value.notes, + mock_transfer.return_value.user_scripts), ( mock.sentinel.origin_endpoint_id, mock.sentinel.destination_endpoint_id, @@ -2110,137 +1977,117 @@ def test_create_instances_replica( {}) ) mock_check_minion_pools_for_action.assert_called_once_with( - mock.sentinel.context, mock_replica.return_value) - mock_check_create_reservation_for_transfer.assert_called_once_with( - mock_replica.return_value, - licensing_client.RESERVATION_TYPE_REPLICA - ) - mock_add_replica.assert_called_once_with( - mock.sentinel.context, mock_replica.return_value) - mock_get_replica.assert_called_once_with( - mock.sentinel.context, mock_replica.return_value.id) - - @mock.patch.object(db_api, 'get_replica') - def test__get_replica(self, mock_get_replica): - result = self.server._get_replica( - mock.sentinel.context, - mock.sentinel.replica_id, + mock.sentinel.context, mock_transfer.return_value) + mock_create_reservation_for_transfer.assert_called_once_with( + mock_transfer.return_value) + mock_add_transfer.assert_called_once_with( + mock.sentinel.context, mock_transfer.return_value) + mock_get_transfer.assert_called_once_with( + mock.sentinel.context, mock_transfer.return_value.id) + + @mock.patch.object(db_api, 'get_transfer') + def test__get_transfer(self, mock_get_transfer): + result = self.server._get_transfer( + mock.sentinel.context, + mock.sentinel.transfer_id, include_task_info=False, to_dict=False ) self.assertEqual( - mock_get_replica.return_value, + mock_get_transfer.return_value, result ) - mock_get_replica.assert_called_once_with( + mock_get_transfer.assert_called_once_with( mock.sentinel.context, - mock.sentinel.replica_id, + mock.sentinel.transfer_id, include_task_info=False, to_dict=False ) - @mock.patch.object(db_api, 'get_replica') - def 
test__get_replica_not_found(self, mock_get_replica): - mock_get_replica.return_value = None + @mock.patch.object(db_api, 'get_transfer') + def test__get_transfer_not_found(self, mock_get_transfer): + mock_get_transfer.return_value = None self.assertRaises( exception.NotFound, - self.server._get_replica, + self.server._get_transfer, mock.sentinel.context, - mock.sentinel.replica_id, + mock.sentinel.transfer_id, include_task_info=False, to_dict=False ) - mock_get_replica.assert_called_once_with( + mock_get_transfer.assert_called_once_with( mock.sentinel.context, - mock.sentinel.replica_id, + mock.sentinel.transfer_id, include_task_info=False, to_dict=False ) - @mock.patch.object(db_api, 'get_migrations') - def test_get_migrations(self, mock_get_migrations): - result = self.server.get_migrations( - mock.sentinel.context, - mock.sentinel.migration_id, - include_task_info=False - ) - self.assertEqual( - mock_get_migrations.return_value, - result - ) - mock_get_migrations.assert_called_once_with( - mock.sentinel.context, - mock.sentinel.migration_id, - include_task_info=False, - to_dict=True - ) - - @mock.patch.object(server.ConductorServerEndpoint, '_get_migration') - def test_get_migration(self, mock_get_migration): - result = testutils.get_wrapped_function(self.server.get_migration)( + @mock.patch.object(server.ConductorServerEndpoint, '_get_deployment') + def test_get_deployment(self, mock_get_deployment): + result = testutils.get_wrapped_function(self.server.get_deployment)( self.server, mock.sentinel.context, - mock.sentinel.migration_id, + mock.sentinel.deployment_id, include_task_info=False ) self.assertEqual( - mock_get_migration.return_value, + mock_get_deployment.return_value, result ) - mock_get_migration.assert_called_once_with( + mock_get_deployment.assert_called_once_with( mock.sentinel.context, - mock.sentinel.migration_id, + mock.sentinel.deployment_id, include_task_info=False, to_dict=True ) - @mock.patch.object(db_api, 'get_replica_migrations') - def 
test_check_running_replica_migrations( + @mock.patch.object(db_api, 'get_transfer_deployments') + def test_check_running_transfer_deployments( self, - mock_get_replica_migrations + mock_get_transfer_deployments ): - migration_1 = mock.Mock() - migration_2 = mock.Mock() - migration_1.executions = [mock.Mock()] - migration_1.executions[0].status = \ + deployment_1 = mock.Mock() + deployment_2 = mock.Mock() + deployment_1.executions = [mock.Mock()] + deployment_1.executions[0].status = \ constants.EXECUTION_STATUS_COMPLETED - migration_2.executions = [mock.Mock()] - migration_2.executions[0].status = \ + deployment_2.executions = [mock.Mock()] + deployment_2.executions[0].status = \ constants.EXECUTION_STATUS_ERROR - migrations = [migration_1, migration_2] - mock_get_replica_migrations.return_value = migrations - self.server._check_running_replica_migrations( + deployments = [deployment_1, deployment_2] + mock_get_transfer_deployments.return_value = deployments + self.server._check_running_transfer_deployments( mock.sentinel.context, - mock.sentinel.replica_id, + mock.sentinel.transfer_id, ) - mock_get_replica_migrations.assert_called_once_with( + mock_get_transfer_deployments.assert_called_once_with( mock.sentinel.context, - mock.sentinel.replica_id, + mock.sentinel.transfer_id, ) - @mock.patch.object(db_api, 'get_replica_migrations') - def test_check_running_replica_migrations_invalid_replica_state( + @mock.patch.object(db_api, 'get_transfer_deployments') + def test_check_running_transfer_deployments_invalid_transfer_state( self, - mock_get_replica_migrations + mock_get_transfer_deployments ): - migration_1 = mock.Mock() - migration_2 = mock.Mock() - migration_1.executions = [mock.Mock()] - migration_1.executions[0].status = constants.EXECUTION_STATUS_RUNNING - migration_2.executions = [mock.Mock()] - migration_2.executions[0].status = \ + deployment_1 = mock.Mock() + deployment_2 = mock.Mock() + deployment_1.executions = [mock.Mock()] + 
deployment_1.executions[0].status = constants.EXECUTION_STATUS_RUNNING + deployment_2.executions = [mock.Mock()] + deployment_2.executions[0].status = \ constants.EXECUTION_STATUS_COMPLETED - migrations = [migration_1, migration_2] - mock_get_replica_migrations.return_value = migrations + deployments = [deployment_1, deployment_2] + mock_get_transfer_deployments.return_value = deployments self.assertRaises( - exception.InvalidReplicaState, - self.server._check_running_replica_migrations, + exception.InvalidTransferState, + self.server._check_running_transfer_deployments, mock.sentinel.context, - mock.sentinel.replica_id, + mock.sentinel.transfer_id, ) - mock_get_replica_migrations.assert_called_once_with( + mock_get_transfer_deployments.assert_called_once_with( mock.sentinel.context, - mock.sentinel.replica_id, + mock.sentinel.transfer_id, ) def test_check_running_executions(self): @@ -2266,42 +2113,42 @@ def test_check_running_executions_invalid_state(self): ) @mock.patch.object(server.ConductorServerEndpoint, - '_check_running_replica_migrations') + '_check_running_transfer_deployments') @mock.patch.object(server.ConductorServerEndpoint, '_check_running_executions') - def test_check_replica_running_executions( + def test_check_transfer_running_executions( self, mock_check_running_executions, - mock_check_running_replica_migrations + mock_check_running_transfer_deployments ): - replica = mock.Mock() - self.server._check_replica_running_executions( + transfer = mock.Mock() + self.server._check_transfer_running_executions( mock.sentinel.context, - replica + transfer ) - mock_check_running_executions.assert_called_once_with(replica) - mock_check_running_replica_migrations.assert_called_once_with( + mock_check_running_executions.assert_called_once_with(transfer) + mock_check_running_transfer_deployments.assert_called_once_with( mock.sentinel.context, - replica.id + transfer.id ) - def test_check_valid_replica_tasks_execution(self): + def 
test_check_valid_transfer_tasks_execution(self): execution1 = mock.Mock( number=1, - type=constants.EXECUTION_TYPE_REPLICA_EXECUTION, + type=constants.EXECUTION_TYPE_TRANSFER_EXECUTION, status=constants.EXECUTION_STATUS_COMPLETED, ) execution2 = mock.Mock( number=2, - type=constants.EXECUTION_TYPE_REPLICA_EXECUTION, + type=constants.EXECUTION_TYPE_TRANSFER_EXECUTION, status=constants.EXECUTION_STATUS_COMPLETED, ) - mock_replica = mock.Mock( + mock_transfer = mock.Mock( executions=[execution1, execution2] ) - self.server._check_valid_replica_tasks_execution( - mock_replica + self.server._check_valid_transfer_tasks_execution( + mock_transfer ) # raises exception if all executions are incomplete @@ -2309,15 +2156,15 @@ def test_check_valid_replica_tasks_execution(self): execution2.status = constants.EXECUTION_STATUS_UNEXECUTED self.assertRaises( - exception.InvalidReplicaState, - self.server._check_valid_replica_tasks_execution, - mock_replica + exception.InvalidTransferState, + self.server._check_valid_transfer_tasks_execution, + mock_transfer ) # doesn't raise exception if all executions are incomplete # and is forced - self.server._check_valid_replica_tasks_execution( - mock_replica, + self.server._check_valid_transfer_tasks_execution( + mock_transfer, True ) @@ -2325,16 +2172,16 @@ def test_check_valid_replica_tasks_execution(self): execution1.status = constants.EXECUTION_STATUS_COMPLETED execution2.status = constants.EXECUTION_STATUS_UNEXECUTED - self.server._check_valid_replica_tasks_execution( - mock_replica + self.server._check_valid_transfer_tasks_execution( + mock_transfer ) - mock_replica.executions = [] + mock_transfer.executions = [] self.assertRaises( - exception.InvalidReplicaState, - self.server._check_valid_replica_tasks_execution, - mock_replica + exception.InvalidTransferState, + self.server._check_valid_transfer_tasks_execution, + mock_transfer ) @mock.patch.object(server.ConductorServerEndpoint, @@ -2370,7 +2217,7 @@ def 
test_get_provider_types(self, mock_get_available_providers): @mock.patch.object( server.ConductorServerEndpoint, - '_get_replica' + '_get_transfer' ) @mock.patch.object( server.ConductorServerEndpoint, @@ -2378,11 +2225,11 @@ def test_get_provider_types(self, mock_get_available_providers): ) @mock.patch.object( server.ConductorServerEndpoint, - '_check_replica_running_executions' + '_check_transfer_running_executions' ) @mock.patch.object( server.ConductorServerEndpoint, - '_check_valid_replica_tasks_execution' + '_check_valid_transfer_tasks_execution' ) @mock.patch.object( server.ConductorServerEndpoint, @@ -2392,8 +2239,8 @@ def test_get_provider_types(self, mock_get_available_providers): server.ConductorServerEndpoint, '_get_provider_types' ) - @mock.patch.object(models, "Migration") - @mock.patch.object(uuid, "uuid4", return_value="migration_id") + @mock.patch.object(models, "Deployment") + @mock.patch.object(uuid, "uuid4", return_value="deployment_id") @mock.patch.object(copy, "deepcopy") @mock.patch.object( server.ConductorServerEndpoint, @@ -2412,7 +2259,7 @@ def test_get_provider_types(self, mock_get_available_providers): server.ConductorServerEndpoint, '_check_execution_tasks_sanity' ) - @mock.patch.object(db_api, 'add_migration') + @mock.patch.object(db_api, 'add_deployment') @mock.patch.object(lockutils, 'lock') @mock.patch.object( server.ConductorServerEndpoint, @@ -2428,18 +2275,18 @@ def test_get_provider_types(self, mock_get_available_providers): ) @mock.patch.object( server.ConductorServerEndpoint, - "get_migration" + "get_deployment" ) - @ddt.file_data("data/deploy_replica_instance_config.yml") + @ddt.file_data("data/deploy_transfer_instance_config.yml") @ddt.unpack - def test_deploy_replica_instance( + def test_deploy_transfer_instance( self, - mock_get_migration, + mock_get_deployment, mock_begin_tasks, mock_set_tasks_execution_status, mock_minion_manager_client, mock_lock, - mock_add_migration, + mock_add_deployment, 
mock_check_execution_tasks_sanity, mock_create_task, mock_get_instance_scripts, @@ -2447,13 +2294,13 @@ def test_deploy_replica_instance( mock_check_minion_pools_for_action, mock_deepcopy, mock_uuid4, - mock_migration, + mock_deployment, mock_get_provider_types, mock_get_endpoint, - mock_check_valid_replica_tasks_execution, - mock_check_replica_running_executions, + mock_check_valid_transfer_tasks_execution, + mock_check_transfer_running_executions, mock_check_reservation_for_transfer, - mock_get_replica, + mock_get_transfer, config, expected_tasks, ): @@ -2474,7 +2321,7 @@ def test_deploy_replica_instance( mock.sentinel.instance2: mock.sentinel.pool2, } - mock_get_replica.return_value = mock.Mock( + mock_get_transfer.return_value = mock.Mock( instances=[mock.sentinel.instance1, mock.sentinel.instance2], info={ mock.sentinel.instance1: { @@ -2487,10 +2334,10 @@ def test_deploy_replica_instance( instance_osmorphing_minion_pool_mappings={} ) - def call_deploy_replica_instance(): - return self.server.deploy_replica_instances( + def call_deploy_transfer_instance(): + return self.server.deploy_transfer_instances( mock.sentinel.context, - mock.sentinel.replica_id, + mock.sentinel.transfer_id, clone_disks=clone_disks, force=False, instance_osmorphing_minion_pool_mappings=( @@ -2501,30 +2348,26 @@ def call_deploy_replica_instance(): # One of the instances has no volumes info self.assertRaises( - exception.InvalidReplicaState, - call_deploy_replica_instance, + exception.InvalidTransferState, + call_deploy_transfer_instance, ) mock_get_endpoint.assert_called_once_with( mock.sentinel.context, - mock_get_replica.return_value.destination_endpoint_id + mock_get_transfer.return_value.destination_endpoint_id ) - mock_get_replica.assert_called_once_with( + mock_get_transfer.assert_called_once_with( mock.sentinel.context, - mock.sentinel.replica_id, + mock.sentinel.transfer_id, include_task_info=True, ) - mock_check_reservation_for_transfer.assert_called_once_with( - 
mock_get_replica.return_value, - licensing_client.RESERVATION_TYPE_REPLICA - ) - mock_check_replica_running_executions.assert_called_once_with( + mock_check_transfer_running_executions.assert_called_once_with( mock.sentinel.context, - mock_get_replica.return_value + mock_get_transfer.return_value ) - mock_check_valid_replica_tasks_execution.assert_called_once_with( - mock_get_replica.return_value, + mock_check_valid_transfer_tasks_execution.assert_called_once_with( + mock_get_transfer.return_value, False ) mock_get_provider_types.assert_called_once_with( @@ -2533,7 +2376,7 @@ def call_deploy_replica_instance(): ) # add the missing volumes info - mock_get_replica.return_value.info[mock.sentinel.instance2] = { + mock_get_transfer.return_value.info[mock.sentinel.instance2] = { 'volumes_info': mock.sentinel.volumes_info2 } @@ -2558,12 +2401,14 @@ def create_task_side_effect( mock_create_task.side_effect = create_task_side_effect # no longer raises exception - migration = call_deploy_replica_instance() + deployment = call_deploy_transfer_instance() mock_check_minion_pools_for_action.assert_called_once_with( mock.sentinel.context, - mock_migration.return_value + mock_deployment.return_value ) + mock_check_reservation_for_transfer.assert_called_once_with( + mock_get_transfer.return_value) self.assertEqual( mock_tasks_execution.return_value.status, @@ -2571,17 +2416,17 @@ def create_task_side_effect( ) self.assertEqual( mock_tasks_execution.return_value.type, - constants.EXECUTION_TYPE_REPLICA_DEPLOY + constants.EXECUTION_TYPE_DEPLOYMENT ) - for instance in mock_get_replica.return_value.instances: + for instance in mock_get_transfer.return_value.instances: mock_get_instance_scripts.assert_any_call( mock.sentinel.user_scripts, instance, ) mock_create_task.assert_any_call( instance, - constants.TASK_TYPE_VALIDATE_REPLICA_DEPLOYMENT_INPUTS, + constants.TASK_TYPE_VALIDATE_DEPLOYMENT_INPUTS, mock_tasks_execution.return_value, ) @@ -2604,25 +2449,25 @@ def 
create_task_side_effect( mock_check_execution_tasks_sanity.assert_called_once_with( mock_tasks_execution.return_value, - mock_migration.return_value.info, + mock_deployment.return_value.info, ) - mock_add_migration.assert_called_once_with( + mock_add_deployment.assert_called_once_with( mock.sentinel.context, - mock_migration.return_value, + mock_deployment.return_value, ) if not skip_os_morphing and has_os_morphing_minion: mock_lock.assert_any_call( - constants.MIGRATION_LOCK_NAME_FORMAT - % mock_migration.return_value.id, + constants.DEPLOYMENT_LOCK_NAME_FORMAT + % mock_deployment.return_value.id, external=True, ) mock_minion_manager_client\ - .allocate_minion_machines_for_migration\ + .allocate_minion_machines_for_deployment\ .assert_called_once_with( mock.sentinel.context, - mock_migration.return_value, + mock_deployment.return_value, include_transfer_minions=False, include_osmorphing_minions=True ) @@ -2634,18 +2479,18 @@ def create_task_side_effect( else: mock_begin_tasks.assert_called_once_with( mock.sentinel.context, - mock_migration.return_value, + mock_deployment.return_value, mock_tasks_execution.return_value, ) - mock_get_migration.assert_called_once_with( + mock_get_deployment.assert_called_once_with( mock.sentinel.context, - mock_migration.return_value.id, + mock_deployment.return_value.id, ) self.assertEqual( - migration, - mock_get_migration.return_value + deployment, + mock_get_deployment.return_value ) def test_get_instance_scripts( @@ -2891,167 +2736,167 @@ def test_update_task_info_for_minion_allocations( expected_action_info[mock.sentinel.instance2] ) - @mock.patch.object(server.ConductorServerEndpoint, '_get_replica') - def test_get_last_execution_for_replica( + @mock.patch.object(server.ConductorServerEndpoint, '_get_transfer') + def test_get_last_execution_for_transfer( self, - mock_get_replica + mock_get_transfer ): - replica = mock.Mock() - replica.id = mock.sentinel.id + transfer = mock.Mock() + transfer.id = mock.sentinel.id execution1 = 
mock.Mock(id=mock.sentinel.execution_id1, number=1) execution2 = mock.Mock(id=mock.sentinel.execution_id2, number=3) execution3 = mock.Mock(id=mock.sentinel.execution_id3, number=2) - replica.executions = [execution1, execution2, execution3] - mock_get_replica.return_value = replica - result = self.server._get_last_execution_for_replica( + transfer.executions = [execution1, execution2, execution3] + mock_get_transfer.return_value = transfer + result = self.server._get_last_execution_for_transfer( mock.sentinel.context, - replica, + transfer, requery=False ) self.assertEqual( execution2, result ) - mock_get_replica.assert_not_called() - replica.executions = None + mock_get_transfer.assert_not_called() + transfer.executions = None self.assertRaises( - exception.InvalidReplicaState, - self.server._get_last_execution_for_replica, + exception.InvalidTransferState, + self.server._get_last_execution_for_transfer, mock.sentinel.context, - replica, + transfer, requery=True ) - mock_get_replica.assert_called_once_with( + mock_get_transfer.assert_called_once_with( mock.sentinel.context, mock.sentinel.id) - @mock.patch.object(server.ConductorServerEndpoint, '_get_migration') - def test_get_execution_for_migration( + @mock.patch.object(server.ConductorServerEndpoint, '_get_deployment') + def test_get_execution_for_deployment( self, - mock_get_migration + mock_get_deployment ): - migration = mock.Mock() - migration.id = mock.sentinel.id + deployment = mock.Mock() + deployment.id = mock.sentinel.id execution1 = mock.Mock(id=mock.sentinel.execution_id1) execution2 = mock.Mock(id=mock.sentinel.execution_id2) - migration.executions = [execution1] - mock_get_migration.return_value = migration - result = self.server._get_execution_for_migration( + deployment.executions = [execution1] + mock_get_deployment.return_value = deployment + result = self.server._get_execution_for_deployment( mock.sentinel.context, - migration, + deployment, requery=False ) self.assertEqual( execution1, result 
) - mock_get_migration.assert_not_called() - migration.executions = [execution1, execution2] + mock_get_deployment.assert_not_called() + deployment.executions = [execution1, execution2] self.assertRaises( - exception.InvalidMigrationState, - self.server._get_execution_for_migration, + exception.InvalidDeploymentState, + self.server._get_execution_for_deployment, mock.sentinel.context, - migration, + deployment, requery=True ) - mock_get_migration.assert_called_once_with( + mock_get_deployment.assert_called_once_with( mock.sentinel.context, mock.sentinel.id) - migration.executions = [] + deployment.executions = [] self.assertRaises( - exception.InvalidMigrationState, - self.server._get_execution_for_migration, + exception.InvalidDeploymentState, + self.server._get_execution_for_deployment, mock.sentinel.context, - migration, + deployment, requery=False ) @mock.patch.object(server.ConductorServerEndpoint, '_begin_tasks') - @mock.patch.object(db_api, 'get_replica_tasks_execution') + @mock.patch.object(db_api, 'get_transfer_tasks_execution') @mock.patch.object(server.ConductorServerEndpoint, '_update_task_info_for_minion_allocations') @mock.patch.object(server.ConductorServerEndpoint, - '_get_last_execution_for_replica') - @mock.patch.object(server.ConductorServerEndpoint, '_get_replica') - def test_confirm_replica_minions_allocation( + '_get_last_execution_for_transfer') + @mock.patch.object(server.ConductorServerEndpoint, '_get_transfer') + def test_confirm_transfer_minions_allocation( self, - mock_get_replica, - mock_get_last_execution_for_replica, + mock_get_transfer, + mock_get_last_execution_for_transfer, mock_update_task_info_for_minion_allocations, - mock_get_replica_tasks_execution, + mock_get_transfer_tasks_execution, mock_begin_tasks ): - mock_get_replica.return_value.last_execution_status = \ + mock_get_transfer.return_value.last_execution_status = \ constants.EXECUTION_STATUS_AWAITING_MINION_ALLOCATIONS testutils.get_wrapped_function( - 
self.server.confirm_replica_minions_allocation)( + self.server.confirm_transfer_minions_allocation)( self.server, mock.sentinel.context, - mock.sentinel.replica_id, + mock.sentinel.transfer_id, mock.sentinel.minion_machine_allocations ) - mock_get_replica.assert_called_once_with( + mock_get_transfer.assert_called_once_with( mock.sentinel.context, - mock.sentinel.replica_id, + mock.sentinel.transfer_id, include_task_info=True ) - mock_get_last_execution_for_replica.assert_called_once_with( + mock_get_last_execution_for_transfer.assert_called_once_with( mock.sentinel.context, - mock_get_replica.return_value, + mock_get_transfer.return_value, requery=False ) mock_update_task_info_for_minion_allocations.assert_called_once_with( mock.sentinel.context, - mock_get_replica.return_value, + mock_get_transfer.return_value, mock.sentinel.minion_machine_allocations ) - mock_get_replica_tasks_execution.assert_called_once_with( + mock_get_transfer_tasks_execution.assert_called_once_with( mock.sentinel.context, - mock_get_replica.return_value.id, - mock_get_last_execution_for_replica.return_value.id + mock_get_transfer.return_value.id, + mock_get_last_execution_for_transfer.return_value.id ) mock_begin_tasks.assert_called_once_with( mock.sentinel.context, - mock_get_replica.return_value, - mock_get_replica_tasks_execution.return_value + mock_get_transfer.return_value, + mock_get_transfer_tasks_execution.return_value ) @mock.patch.object(server.ConductorServerEndpoint, '_begin_tasks') - @mock.patch.object(db_api, 'get_replica_tasks_execution') + @mock.patch.object(db_api, 'get_transfer_tasks_execution') @mock.patch.object(server.ConductorServerEndpoint, '_update_task_info_for_minion_allocations') @mock.patch.object(server.ConductorServerEndpoint, - '_get_last_execution_for_replica') - @mock.patch.object(server.ConductorServerEndpoint, '_get_replica') - def test_confirm_replica_minions_allocation_unexpected_status( + '_get_last_execution_for_transfer') + 
@mock.patch.object(server.ConductorServerEndpoint, '_get_transfer') + def test_confirm_transfer_minions_allocation_unexpected_status( self, - mock_get_replica, - mock_get_last_execution_for_replica, + mock_get_transfer, + mock_get_last_execution_for_transfer, mock_update_task_info_for_minion_allocations, - mock_get_replica_tasks_execution, + mock_get_transfer_tasks_execution, mock_begin_tasks ): - mock_get_replica.return_value.last_execution_status = \ + mock_get_transfer.return_value.last_execution_status = \ constants.EXECUTION_STATUS_CANCELED self.assertRaises( - exception.InvalidReplicaState, + exception.InvalidTransferState, testutils.get_wrapped_function( - self.server.confirm_replica_minions_allocation), + self.server.confirm_transfer_minions_allocation), self.server, mock.sentinel.context, - mock.sentinel.replica_id, + mock.sentinel.transfer_id, mock.sentinel.minion_machine_allocations ) - mock_get_replica.assert_called_once_with( + mock_get_transfer.assert_called_once_with( mock.sentinel.context, - mock.sentinel.replica_id, + mock.sentinel.transfer_id, include_task_info=True ) - mock_get_last_execution_for_replica.assert_not_called() + mock_get_last_execution_for_transfer.assert_not_called() mock_update_task_info_for_minion_allocations.assert_not_called() - mock_get_replica_tasks_execution.assert_not_called() + mock_get_transfer_tasks_execution.assert_not_called() mock_begin_tasks.assert_not_called() @mock.patch.object(server.ConductorServerEndpoint, @@ -3059,43 +2904,43 @@ def test_confirm_replica_minions_allocation_unexpected_status( @mock.patch.object(server.ConductorServerEndpoint, '_cancel_tasks_execution') @mock.patch.object(server.ConductorServerEndpoint, - '_get_last_execution_for_replica') - @mock.patch.object(server.ConductorServerEndpoint, '_get_replica') - def test_report_replica_minions_allocation_error( + '_get_last_execution_for_transfer') + @mock.patch.object(server.ConductorServerEndpoint, '_get_transfer') + def 
test_report_transfer_minions_allocation_error( self, - mock_get_replica, - mock_get_last_execution_for_replica, + mock_get_transfer, + mock_get_last_execution_for_transfer, mock_cancel_tasks_execution, mock_set_tasks_execution_status ): - mock_get_replica.return_value.last_execution_status = \ + mock_get_transfer.return_value.last_execution_status = \ constants.EXECUTION_STATUS_AWAITING_MINION_ALLOCATIONS testutils.get_wrapped_function( - self.server.report_replica_minions_allocation_error)( + self.server.report_transfer_minions_allocation_error)( self.server, mock.sentinel.context, - mock.sentinel.replica_id, + mock.sentinel.transfer_id, mock.sentinel.minion_allocation_error_details ) - mock_get_replica.assert_called_once_with( + mock_get_transfer.assert_called_once_with( mock.sentinel.context, - mock.sentinel.replica_id + mock.sentinel.transfer_id ) - mock_get_last_execution_for_replica.assert_called_once_with( + mock_get_last_execution_for_transfer.assert_called_once_with( mock.sentinel.context, - mock_get_replica.return_value, + mock_get_transfer.return_value, requery=False ) mock_cancel_tasks_execution.assert_called_once_with( mock.sentinel.context, - mock_get_last_execution_for_replica.return_value, + mock_get_last_execution_for_transfer.return_value, requery=True ) mock_set_tasks_execution_status.assert_called_once_with( mock.sentinel.context, - mock_get_last_execution_for_replica.return_value, + mock_get_last_execution_for_transfer.return_value, constants.EXECUTION_STATUS_ERROR_ALLOCATING_MINIONS ) @@ -3104,33 +2949,33 @@ def test_report_replica_minions_allocation_error( @mock.patch.object(server.ConductorServerEndpoint, '_cancel_tasks_execution') @mock.patch.object(server.ConductorServerEndpoint, - '_get_last_execution_for_replica') - @mock.patch.object(server.ConductorServerEndpoint, '_get_replica') - def test_report_replica_minions_allocation_error_unexpected_status( + '_get_last_execution_for_transfer') + 
@mock.patch.object(server.ConductorServerEndpoint, '_get_transfer') + def test_report_transfer_minions_allocation_error_unexpected_status( self, - mock_get_replica, - mock_get_last_execution_for_replica, + mock_get_transfer, + mock_get_last_execution_for_transfer, mock_cancel_tasks_execution, mock_set_tasks_execution_status ): - mock_get_replica.return_value.last_execution_status = \ + mock_get_transfer.return_value.last_execution_status = \ constants.EXECUTION_STATUS_CANCELED self.assertRaises( - exception.InvalidReplicaState, + exception.InvalidTransferState, testutils.get_wrapped_function( - self.server.report_replica_minions_allocation_error), + self.server.report_transfer_minions_allocation_error), self.server, mock.sentinel.context, - mock.sentinel.replica_id, + mock.sentinel.transfer_id, mock.sentinel.minion_allocation_error_details ) - mock_get_replica.assert_called_once_with( + mock_get_transfer.assert_called_once_with( mock.sentinel.context, - mock.sentinel.replica_id + mock.sentinel.transfer_id ) - mock_get_last_execution_for_replica.assert_not_called() + mock_get_last_execution_for_transfer.assert_not_called() mock_cancel_tasks_execution.assert_not_called() mock_set_tasks_execution_status.assert_not_called() @@ -3138,79 +2983,79 @@ def test_report_replica_minions_allocation_error_unexpected_status( @mock.patch.object(server.ConductorServerEndpoint, '_update_task_info_for_minion_allocations') @mock.patch.object(server.ConductorServerEndpoint, - '_get_execution_for_migration') - @mock.patch.object(server.ConductorServerEndpoint, '_get_migration') - def test_confirm_migration_minions_allocation( + '_get_execution_for_deployment') + @mock.patch.object(server.ConductorServerEndpoint, '_get_deployment') + def test_confirm_deployment_minions_allocation( self, - mock_get_migration, - mock_get_execution_for_migration, + mock_get_deployment, + mock_get_execution_for_deployment, mock_update_task_info_for_minion_allocations, mock_begin_tasks ): - 
mock_get_migration.return_value.last_execution_status = \ + mock_get_deployment.return_value.last_execution_status = \ constants.EXECUTION_STATUS_AWAITING_MINION_ALLOCATIONS testutils.get_wrapped_function( - self.server.confirm_migration_minions_allocation)( + self.server.confirm_deployment_minions_allocation)( self.server, mock.sentinel.context, - mock.sentinel.replica_id, + mock.sentinel.transfer_id, mock.sentinel.minion_machine_allocations ) - mock_get_migration.assert_called_once_with( + mock_get_deployment.assert_called_once_with( mock.sentinel.context, - mock.sentinel.replica_id, + mock.sentinel.transfer_id, include_task_info=True ) - mock_get_execution_for_migration.assert_called_once_with( + mock_get_execution_for_deployment.assert_called_once_with( mock.sentinel.context, - mock_get_migration.return_value, + mock_get_deployment.return_value, requery=False ) mock_update_task_info_for_minion_allocations.assert_called_once_with( mock.sentinel.context, - mock_get_migration.return_value, + mock_get_deployment.return_value, mock.sentinel.minion_machine_allocations ) mock_begin_tasks.assert_called_once_with( mock.sentinel.context, - mock_get_migration.return_value, - mock_get_execution_for_migration.return_value + mock_get_deployment.return_value, + mock_get_execution_for_deployment.return_value ) @mock.patch.object(server.ConductorServerEndpoint, '_begin_tasks') @mock.patch.object(server.ConductorServerEndpoint, '_update_task_info_for_minion_allocations') @mock.patch.object(server.ConductorServerEndpoint, - '_get_execution_for_migration') - @mock.patch.object(server.ConductorServerEndpoint, '_get_migration') - def test_confirm_migration_minions_allocation_unexpected_status( + '_get_execution_for_deployment') + @mock.patch.object(server.ConductorServerEndpoint, '_get_deployment') + def test_confirm_deployment_minions_allocation_unexpected_status( self, - mock_get_migration, - mock_get_execution_for_migration, + mock_get_deployment, + 
mock_get_execution_for_deployment, mock_update_task_info_for_minion_allocations, mock_begin_tasks ): - mock_get_migration.return_value.last_execution_status = \ + mock_get_deployment.return_value.last_execution_status = \ constants.EXECUTION_STATUS_CANCELED self.assertRaises( - exception.InvalidMigrationState, + exception.InvalidDeploymentState, testutils.get_wrapped_function( - self.server.confirm_migration_minions_allocation), + self.server.confirm_deployment_minions_allocation), self.server, mock.sentinel.context, - mock.sentinel.replica_id, + mock.sentinel.transfer_id, mock.sentinel.minion_machine_allocations ) - mock_get_migration.assert_called_once_with( + mock_get_deployment.assert_called_once_with( mock.sentinel.context, - mock.sentinel.replica_id, + mock.sentinel.transfer_id, include_task_info=True ) - mock_get_execution_for_migration.assert_not_called() + mock_get_execution_for_deployment.assert_not_called() mock_update_task_info_for_minion_allocations.assert_not_called() mock_begin_tasks.assert_not_called() @@ -3220,43 +3065,43 @@ def test_confirm_migration_minions_allocation_unexpected_status( @mock.patch.object(server.ConductorServerEndpoint, '_cancel_tasks_execution') @mock.patch.object(server.ConductorServerEndpoint, - '_get_execution_for_migration') - @mock.patch.object(server.ConductorServerEndpoint, '_get_migration') - def test_report_migration_minions_allocation_error( + '_get_execution_for_deployment') + @mock.patch.object(server.ConductorServerEndpoint, '_get_deployment') + def test_report_deployment_minions_allocation_error( self, - mock_get_migration, - mock_get_execution_for_migration, + mock_get_deployment, + mock_get_execution_for_deployment, mock_cancel_tasks_execution, mock_set_tasks_execution_status ): - mock_get_migration.return_value.last_execution_status = \ + mock_get_deployment.return_value.last_execution_status = \ constants.EXECUTION_STATUS_AWAITING_MINION_ALLOCATIONS testutils.get_wrapped_function( - 
self.server.report_migration_minions_allocation_error)( + self.server.report_deployment_minions_allocation_error)( self.server, mock.sentinel.context, - mock.sentinel.replica_id, + mock.sentinel.transfer_id, mock.sentinel.minion_allocation_error_details ) - mock_get_migration.assert_called_once_with( + mock_get_deployment.assert_called_once_with( mock.sentinel.context, - mock.sentinel.replica_id + mock.sentinel.transfer_id ) - mock_get_execution_for_migration.assert_called_once_with( + mock_get_execution_for_deployment.assert_called_once_with( mock.sentinel.context, - mock_get_migration.return_value, + mock_get_deployment.return_value, requery=False ) mock_cancel_tasks_execution.assert_called_once_with( mock.sentinel.context, - mock_get_execution_for_migration.return_value, + mock_get_execution_for_deployment.return_value, requery=True ) mock_set_tasks_execution_status.assert_called_once_with( mock.sentinel.context, - mock_get_execution_for_migration.return_value, + mock_get_execution_for_deployment.return_value, constants.EXECUTION_STATUS_ERROR_ALLOCATING_MINIONS ) @@ -3265,319 +3110,36 @@ def test_report_migration_minions_allocation_error( @mock.patch.object(server.ConductorServerEndpoint, '_cancel_tasks_execution') @mock.patch.object(server.ConductorServerEndpoint, - '_get_execution_for_migration') - @mock.patch.object(server.ConductorServerEndpoint, '_get_migration') - def test_report_migration_minions_allocation_error_unexpected_status( + '_get_execution_for_deployment') + @mock.patch.object(server.ConductorServerEndpoint, '_get_deployment') + def test_report_deployment_minions_allocation_error_unexpected_status( self, - mock_get_migration, - mock_get_execution_for_migration, + mock_get_deployment, + mock_get_execution_for_deployment, mock_cancel_tasks_execution, mock_set_tasks_execution_status ): - mock_get_migration.return_value.last_execution_status = \ + mock_get_deployment.return_value.last_execution_status = \ constants.EXECUTION_STATUS_CANCELED 
self.assertRaises( - exception.InvalidMigrationState, + exception.InvalidDeploymentState, testutils.get_wrapped_function( - self.server.report_migration_minions_allocation_error), + self.server.report_deployment_minions_allocation_error), self.server, mock.sentinel.context, - mock.sentinel.replica_id, + mock.sentinel.transfer_id, mock.sentinel.minion_allocation_error_details ) - mock_get_migration.assert_called_once_with( + mock_get_deployment.assert_called_once_with( mock.sentinel.context, - mock.sentinel.replica_id + mock.sentinel.transfer_id ) - mock_get_execution_for_migration.assert_not_called() + mock_get_execution_for_deployment.assert_not_called() mock_cancel_tasks_execution.assert_not_called() mock_set_tasks_execution_status.assert_not_called() - @mock.patch.object( - server.ConductorServerEndpoint, - "get_endpoint" - ) - @mock.patch.object( - server.ConductorServerEndpoint, - "_check_endpoints" - ) - @mock.patch.object( - server.ConductorServerEndpoint, - "_get_provider_types" - ) - @mock.patch.object(models, "Migration") - @mock.patch.object(uuid, "uuid4") - @mock.patch.object(models, "TasksExecution") - @mock.patch.object( - server.ConductorServerEndpoint, - "_check_create_reservation_for_transfer" - ) - @mock.patch.object( - server.ConductorServerEndpoint, - "_check_minion_pools_for_action" - ) - @mock.patch.object( - server.ConductorServerEndpoint, - "_get_instance_scripts" - ) - @mock.patch.object( - server.ConductorServerEndpoint, - "_create_task" - ) - @mock.patch.object( - server.ConductorServerEndpoint, - "_check_execution_tasks_sanity" - ) - @mock.patch.object( - db_api, - "add_migration" - ) - @mock.patch.object( - lockutils, - "lock" - ) - @mock.patch.object( - server.ConductorServerEndpoint, - "_minion_manager_client" - ) - @mock.patch.object( - server.ConductorServerEndpoint, - "_set_tasks_execution_status" - ) - @mock.patch.object( - server.ConductorServerEndpoint, - "_begin_tasks" - ) - @mock.patch.object( - server.ConductorServerEndpoint, 
- "get_migration" - ) - @ddt.file_data("data/migrate_instances_config.yml") - @ddt.unpack - def test_migrate_instances( - self, - mock_get_migration, - mock_begin_tasks, - mock_set_tasks_execution_status, - mock_minion_manager_client, - mock_lock, - mock_add_migration, - mock_check_execution_tasks_sanity, - mock_create_task, - mock_get_instance_scripts, - mock_check_minion_pools_for_action, - mock_check_create_reservation_for_transfer, - mock_tasks_execution, - mock_uuid4, - mock_migration, - mock_get_provider_types, - mock_check_endpoints, - mock_get_endpoint, - config, - expected_tasks, - ): - has_origin_minion_pool = config.get( - 'has_origin_minion_pool', False - ) - has_destination_minion_pool = config.get( - 'has_destination_minion_pool', False - ) - has_os_morphing_pool = config.get( - 'has_os_morphing_pool', False - ) - shutdown_instances = config.get('shutdown_instances', False) - skip_os_morphing = config.get('skip_os_morphing', True) - get_optimal_flavor = config.get('get_optimal_flavor', False) - - if get_optimal_flavor: - mock_get_provider_types.return_value = [ - constants.PROVIDER_TYPE_INSTANCE_FLAVOR - ] - - instances = [ - mock.sentinel.instance_1, - mock.sentinel.instance_2, - ] - instance_osmorphing_minion_pool_mappings = {} - if has_os_morphing_pool: - instance_osmorphing_minion_pool_mappings = { - mock.sentinel.instance_1: mock.sentinel.minion_pool_1, - mock.sentinel.instance_2: mock.sentinel.minion_pool_2, - } - - replication_count = 2 - - def create_task_side_effect( - instance, - task_type, - execution, - depends_on=None, - on_error=False, - on_error_only=False - ): - return mock.Mock( - id=task_type, - type=task_type, - instance=instance, - execution=execution, - depends_on=depends_on, - on_error=on_error, - on_error_only=on_error_only, - ) - - mock_create_task.side_effect = create_task_side_effect - - migration = self.server.migrate_instances( - mock.sentinel.context, - mock.sentinel.origin_endpoint_id, - 
mock.sentinel.destination_endpoint_id, - has_origin_minion_pool - and mock.sentinel.origin_minion_pool_id, - has_destination_minion_pool - and mock.sentinel.destination_minion_pool_id, - instance_osmorphing_minion_pool_mappings, - mock.sentinel.source_environment, - mock.sentinel.destination_environment, - instances, - mock.sentinel.network_map, - mock.sentinel.storage_mappings, - replication_count, - shutdown_instances=shutdown_instances, - notes=mock.sentinel.notes, - skip_os_morphing=skip_os_morphing, - user_scripts=mock.sentinel.user_scripts, - ) - - mock_get_endpoint.assert_has_calls([ - mock.call( - mock.sentinel.context, - mock.sentinel.origin_endpoint_id, - ), - mock.call( - mock.sentinel.context, - mock.sentinel.destination_endpoint_id, - ), - ]) - - mock_check_endpoints.assert_called_once_with( - mock.sentinel.context, - mock_get_endpoint.return_value, - mock_get_endpoint.return_value, - ) - - self.assertEqual( - mock_migration.return_value.last_execution_status, - constants.EXECUTION_STATUS_UNEXECUTED, - ) - self.assertEqual( - mock_tasks_execution.return_value.status, - constants.EXECUTION_STATUS_UNEXECUTED, - ) - self.assertEqual( - mock_tasks_execution.return_value.type, - constants.EXECUTION_TYPE_MIGRATION, - ) - - mock_check_create_reservation_for_transfer.assert_called_once_with( - mock_migration.return_value, - licensing_client.RESERVATION_TYPE_MIGRATION, - ) - - mock_check_minion_pools_for_action.assert_called_once_with( - mock.sentinel.context, - mock_migration.return_value, - ) - - for instance in instances: - mock_get_instance_scripts.assert_any_call( - mock.sentinel.user_scripts, - instance, - ) - mock_create_task.assert_has_calls([ - mock.call( - instance, - constants.TASK_TYPE_GET_INSTANCE_INFO, - mock_tasks_execution.return_value, - ), - mock.call( - instance, - constants.TASK_TYPE_VALIDATE_MIGRATION_SOURCE_INPUTS, - mock_tasks_execution.return_value, - ), - mock.call( - instance, - 
constants.TASK_TYPE_VALIDATE_MIGRATION_DESTINATION_INPUTS, - mock_tasks_execution.return_value, - depends_on=[constants.TASK_TYPE_GET_INSTANCE_INFO] - ), - ]) - - # tasks defined in the yaml config - for task in expected_tasks: - kwargs = {} - if 'on_error' in task: - kwargs = {'on_error': task['on_error']} - if 'on_error_only' in task: - kwargs = {'on_error_only': task['on_error_only']} - mock_create_task.assert_has_calls([ - mock.call( - instance, - task['type'], - mock_tasks_execution.return_value, - depends_on=task['depends_on'], - **kwargs, - ) - ]) - - mock_check_execution_tasks_sanity.assert_called_once_with( - mock_tasks_execution.return_value, - mock_migration.return_value.info, - ) - - mock_add_migration.assert_called_once_with( - mock.sentinel.context, - mock_migration.return_value, - ) - - if any([ - has_origin_minion_pool, - has_destination_minion_pool, - has_os_morphing_pool, - ]): - mock_lock.assert_any_call( - constants.MIGRATION_LOCK_NAME_FORMAT - % mock_migration.return_value.id, - external=True, - ) - mock_minion_manager_client\ - .allocate_minion_machines_for_migration\ - .assert_called_once_with( - mock.sentinel.context, - mock_migration.return_value, - include_transfer_minions=True, - include_osmorphing_minions=not skip_os_morphing, - ) - mock_set_tasks_execution_status.assert_called_once_with( - mock.sentinel.context, - mock_tasks_execution.return_value, - constants.EXECUTION_STATUS_AWAITING_MINION_ALLOCATIONS - ) - else: - mock_begin_tasks.assert_called_once_with( - mock.sentinel.context, - mock_migration.return_value, - mock_tasks_execution.return_value, - ) - - mock_get_migration.assert_called_once_with( - mock.sentinel.context, - mock_migration.return_value.id, - ) - - self.assertEqual(migration, mock_get_migration.return_value) - @mock.patch.object(db_api, 'get_tasks_execution') @mock.patch.object( server.ConductorServerEndpoint, @@ -3684,147 +3246,47 @@ def call_cancel_tasks_execution( exception_details=mock.ANY, ) - 
@mock.patch.object(db_api, 'get_migration') - def test__get_migration( + @mock.patch.object(db_api, 'get_deployment') + def test__get_deployment( self, - mock_get_migration + mock_get_deployment ): - result = self.server._get_migration( + result = self.server._get_deployment( mock.sentinel.context, - mock.sentinel.migration_id, + mock.sentinel.deployment_id, include_task_info=False, to_dict=False ) self.assertEqual( - mock_get_migration.return_value, + mock_get_deployment.return_value, result ) - mock_get_migration.assert_called_once_with( + mock_get_deployment.assert_called_once_with( mock.sentinel.context, - mock.sentinel.migration_id, + mock.sentinel.deployment_id, include_task_info=False, to_dict=False ) - mock_get_migration.reset_mock() - mock_get_migration.return_value = None + mock_get_deployment.reset_mock() + mock_get_deployment.return_value = None self.assertRaises( exception.NotFound, - self.server._get_migration, + self.server._get_deployment, mock.sentinel.context, - mock.sentinel.migration_id, + mock.sentinel.deployment_id, include_task_info=False, to_dict=False ) - mock_get_migration.assert_called_once_with( + mock_get_deployment.assert_called_once_with( mock.sentinel.context, - mock.sentinel.migration_id, + mock.sentinel.deployment_id, include_task_info=False, to_dict=False ) - @mock.patch.object(db_api, 'delete_migration') - @mock.patch.object(server.ConductorServerEndpoint, '_get_migration') - def test_delete_migration( - self, - mock_get_migration, - mock_delete_migration - ): - migration = mock.Mock() - execution = mock.Mock() - execution.status = constants.EXECUTION_STATUS_COMPLETED - migration.executions = [execution] - mock_get_migration.return_value = migration - - testutils.get_wrapped_function(self.server.delete_migration)( - self.server, - mock.sentinel.context, - mock.sentinel.migration_id - ) - - mock_get_migration.assert_called_once_with( - mock.sentinel.context, - mock.sentinel.migration_id - ) - 
mock_delete_migration.assert_called_once_with( - mock.sentinel.context, - mock.sentinel.migration_id - ) - mock_get_migration.reset_mock() - mock_delete_migration.reset_mock() - execution.status = constants.EXECUTION_STATUS_RUNNING - - self.assertRaises( - exception.InvalidMigrationState, - testutils.get_wrapped_function(self.server.delete_migration), - self.server, - mock.sentinel.context, - mock.sentinel.migration_id - ) - - mock_get_migration.assert_called_once_with( - mock.sentinel.context, - mock.sentinel.migration_id - ) - mock_delete_migration.assert_not_called() - - @mock.patch.object(server.ConductorServerEndpoint, - '_cancel_tasks_execution') - @mock.patch.object(lockutils, 'lock') - @mock.patch.object(server.ConductorServerEndpoint, '_get_migration') - @ddt.file_data("data/cancel_migration_config.yml") - @ddt.unpack - def test_cancel_migration( - self, - mock_get_migration, - mock_lock, - mock_cancel_tasks_execution, - config, - raises_exception - ): - migration = mock.Mock() - migration.executions = [] - statuses = config.get('execution_statuses', []) - for status in statuses: - execution = mock.Mock() - execution.status = getattr(constants, status) - migration.executions.append(execution) - mock_get_migration.return_value = migration - force = config.get('force', False) - - if raises_exception: - self.assertRaises( - exception.InvalidMigrationState, - testutils.get_wrapped_function(self.server.cancel_migration), - self.server, - mock.sentinel.context, - mock.sentinel.migration_id, - force=force - ) - else: - testutils.get_wrapped_function(self.server.cancel_migration)( - self.server, - mock.sentinel.context, - mock.sentinel.migration_id, - force=force - ) - mock_lock.assert_called_once_with( - constants.EXECUTION_LOCK_NAME_FORMAT % execution.id, - external=True - ) - mock_cancel_tasks_execution.assert_called_once_with( - mock.sentinel.context, - execution, - force=force - ) - - mock_get_migration.assert_called_once_with( - mock.sentinel.context, - 
mock.sentinel.migration_id - ) - @mock.patch.object(db_api, 'get_tasks_execution') @mock.patch.object( server.ConductorServerEndpoint, @@ -3946,7 +3408,7 @@ def call_set_tasks_execution_status(new_execution_status): ) mock_delete_trust.assert_not_called() mock_get_action.assert_called_once_with( - context, mock_set_execution_status.return_value.action_id) + context, execution.action_id) mock_deallocate_minion_machines_for_action.assert_called_once_with( context, mock_get_action.return_value) @@ -4535,56 +3997,56 @@ def test_advance_execution_state_scheduled_tasks( ) @mock.patch.object(db_api, 'update_transfer_action_info_for_instance') - def test_update_replica_volumes_info( + def test_update_transfer_volumes_info( self, mock_update_transfer_action_info_for_instance ): - self.server._update_replica_volumes_info( + self.server._update_transfer_volumes_info( mock.sentinel.context, - mock.sentinel.replica_id, + mock.sentinel.transfer_id, mock.sentinel.instance, mock.sentinel.updated_task_info ) mock_update_transfer_action_info_for_instance.assert_called_once_with( mock.sentinel.context, - mock.sentinel.replica_id, + mock.sentinel.transfer_id, mock.sentinel.instance, mock.sentinel.updated_task_info ) @mock.patch.object(server.ConductorServerEndpoint, - '_update_replica_volumes_info') + '_update_transfer_volumes_info') @mock.patch.object(lockutils, 'lock') - @mock.patch.object(db_api, 'get_migration') - def test_update_volumes_info_for_migration_parent_replica( + @mock.patch.object(db_api, 'get_deployment') + def test_update_volumes_info_for_deployment_parent_transfer( self, - mock_get_migration, + mock_get_deployment, mock_lock, - mock_update_replica_volumes_info + mock_update_transfer_volumes_info ): - migration = mock.Mock() - mock_get_migration.return_value = migration + deployment = mock.Mock() + mock_get_deployment.return_value = deployment - self.server._update_volumes_info_for_migration_parent_replica( + 
self.server._update_volumes_info_for_deployment_parent_transfer( mock.sentinel.context, - mock.sentinel.migration_id, + mock.sentinel.deployment_id, mock.sentinel.instance, mock.sentinel.updated_task_info ) - mock_get_migration.assert_called_once_with( + mock_get_deployment.assert_called_once_with( mock.sentinel.context, - mock.sentinel.migration_id + mock.sentinel.deployment_id ) mock_lock.assert_called_once_with( - constants.REPLICA_LOCK_NAME_FORMAT % - mock_get_migration.return_value.replica_id, + constants.TRANSFER_LOCK_NAME_FORMAT % + mock_get_deployment.return_value.transfer_id, external=True ) - mock_update_replica_volumes_info.assert_called_once_with( + mock_update_transfer_volumes_info.assert_called_once_with( mock.sentinel.context, - mock_get_migration.return_value.replica_id, + mock_get_deployment.return_value.transfer_id, mock.sentinel.instance, mock.sentinel.updated_task_info ) @@ -4594,30 +4056,29 @@ def test_update_volumes_info_for_migration_parent_replica( '_minion_manager_client' ) @mock.patch.object(db_api, 'update_minion_machine') - @mock.patch.object(db_api, 'update_replica') + @mock.patch.object(db_api, 'update_transfer') @mock.patch.object( server.ConductorServerEndpoint, - '_update_replica_volumes_info' + '_update_transfer_volumes_info' ) @mock.patch.object(db_api, 'set_transfer_action_result') @mock.patch.object(schemas, 'validate_value') @mock.patch.object( server.ConductorServerEndpoint, - '_update_volumes_info_for_migration_parent_replica' + '_update_volumes_info_for_deployment_parent_transfer' ) def test_handle_post_task_actions( self, - mock_update_volumes_info_for_migration_parent_replica, + mock_update_volumes_info_for_deployment_parent_transfer, mock_validate_value, mock_set_transfer_action_result, - mock_update_replica_volumes_info, - mock_update_replica, + mock_update_transfer_volumes_info, + mock_update_transfer, mock_update_minion_machine, mock_minion_manager_client, ): - # TASK_TYPE_RESTORE_REPLICA_DISK_SNAPSHOTS task = 
mock.Mock( - task_type=constants.TASK_TYPE_RESTORE_REPLICA_DISK_SNAPSHOTS, + task_type=constants.TASK_TYPE_RESTORE_TRANSFER_DISK_SNAPSHOTS, instance=mock.sentinel.instance, ) execution = mock.Mock( @@ -4637,7 +4098,7 @@ def call_handle_post_task_actions(): call_handle_post_task_actions() # no volumes_info - mock_update_volumes_info_for_migration_parent_replica\ + mock_update_volumes_info_for_deployment_parent_transfer\ .assert_not_called() # has volumes_info @@ -4647,7 +4108,7 @@ def call_handle_post_task_actions(): } ] call_handle_post_task_actions() - mock_update_volumes_info_for_migration_parent_replica\ + mock_update_volumes_info_for_deployment_parent_transfer\ .assert_called_once_with( mock.sentinel.context, mock.sentinel.action_id, @@ -4655,12 +4116,11 @@ def call_handle_post_task_actions(): {"volumes_info": task_info["volumes_info"]}, ) - # TASK_TYPE_DELETE_REPLICA_TARGET_DISK_SNAPSHOTS task.task_type = constants\ - .TASK_TYPE_DELETE_REPLICA_TARGET_DISK_SNAPSHOTS + .TASK_TYPE_DELETE_TRANSFER_TARGET_DISK_SNAPSHOTS call_handle_post_task_actions() # no clone_disks, reset volumes_info - mock_update_volumes_info_for_migration_parent_replica\ + mock_update_volumes_info_for_deployment_parent_transfer\ .assert_called_with( mock.sentinel.context, mock.sentinel.action_id, @@ -4674,17 +4134,14 @@ def call_handle_post_task_actions(): 'id': 'clone_disk_id', } ] - mock_update_volumes_info_for_migration_parent_replica\ + mock_update_volumes_info_for_deployment_parent_transfer\ .reset_mock() call_handle_post_task_actions() - mock_update_volumes_info_for_migration_parent_replica\ + mock_update_volumes_info_for_deployment_parent_transfer\ .assert_not_called() - # TASK_TYPE_FINALIZE_INSTANCE_DEPLOYMENT - # TASK_TYPE_FINALIZE_REPLICA_INSTANCE_DEPLOYMENT types = [ constants.TASK_TYPE_FINALIZE_INSTANCE_DEPLOYMENT, - constants.TASK_TYPE_FINALIZE_REPLICA_INSTANCE_DEPLOYMENT, ] for task_type in types: task.task_type = task_type @@ -4724,11 +4181,9 @@ def 
call_handle_post_task_actions(): mock_validate_value.reset_mock() mock_set_transfer_action_result.reset_mock() - # TASK_TYPE_UPDATE_SOURCE_REPLICA - # TASK_TYPE_UPDATE_DESTINATION_REPLICA types = [ - constants.TASK_TYPE_UPDATE_SOURCE_REPLICA, - constants.TASK_TYPE_UPDATE_DESTINATION_REPLICA, + constants.TASK_TYPE_UPDATE_SOURCE_TRANSFER, + constants.TASK_TYPE_UPDATE_DESTINATION_TRANSFER, ] execution.tasks = [ mock.Mock( @@ -4738,9 +4193,9 @@ def call_handle_post_task_actions(): ] for task_type in types: task.task_type = task_type - mock_update_replica_volumes_info.reset_mock() + mock_update_transfer_volumes_info.reset_mock() call_handle_post_task_actions() - mock_update_replica_volumes_info.assert_called_once_with( + mock_update_transfer_volumes_info.assert_called_once_with( mock.sentinel.context, mock.sentinel.action_id, mock.sentinel.instance, @@ -4748,9 +4203,9 @@ def call_handle_post_task_actions(): ) # execution has active tasks - task.type = constants.TASK_TYPE_UPDATE_DESTINATION_REPLICA + task.type = constants.TASK_TYPE_UPDATE_DESTINATION_TRANSFER call_handle_post_task_actions() - mock_update_replica.assert_not_called() + mock_update_transfer.assert_not_called() # execution has no active tasks execution.tasks = [ @@ -4760,12 +4215,12 @@ def call_handle_post_task_actions(): ) ] call_handle_post_task_actions() - mock_update_replica.assert_called_once_with( + mock_update_transfer.assert_called_once_with( mock.sentinel.context, mock.sentinel.action_id, task_info ) - mock_update_replica.reset_mock() + mock_update_transfer.reset_mock() # TASK_TYPE_ATTACH_VOLUMES_TO_SOURCE_MINION # TASK_TYPE_DETACH_VOLUMES_FROM_SOURCE_MINION @@ -4874,7 +4329,7 @@ def call_handle_post_task_actions(): # for any other type of task nothing is called task.task_type = constants.TASK_TYPE_COLLECT_OSMORPHING_INFO call_handle_post_task_actions() - mock_update_replica.assert_not_called() + mock_update_transfer.assert_not_called() mock_update_minion_machine.assert_not_called() 
mock_minion_manager_client.deallocate_minion_machine\ .assert_not_called() @@ -4909,7 +4364,7 @@ def test_task_completed( mock_get_tasks_execution.return_value = mock.Mock( id=mock.sentinel.execution_id, - type=constants.EXECUTION_TYPE_MIGRATION, + type=constants.EXECUTION_TYPE_DEPLOYMENT, action_id=mock.sentinel.action_id, tasks=[ mock.Mock( @@ -5013,9 +4468,6 @@ def test_cancel_execution_for_osmorphing_debugging( @mock.patch.object(server.ConductorServerEndpoint, "_advance_execution_state") - @mock.patch.object(server.ConductorServerEndpoint, - "_check_delete_reservation_for_transfer") - @mock.patch.object(db_api, "get_action") @mock.patch.object(db_api, "get_tasks_execution") @mock.patch.object(db_api, "set_task_status") @mock.patch.object(db_api, "get_task") @@ -5026,8 +4478,6 @@ def test_confirm_task_cancellation( mock_get_task, mock_set_task_status, mock_get_tasks_execution, - mock_get_action, - mock_check_delete_reservation, mock_advance_execution_state, task_status, expected_final_status, @@ -5038,7 +4488,7 @@ def test_confirm_task_cancellation( expected_final_status = getattr(constants, expected_final_status) mock_get_task.return_value = task mock_execution = mock.MagicMock() - mock_execution.type = constants.EXECUTION_TYPE_MIGRATION + mock_execution.type = constants.EXECUTION_TYPE_DEPLOYMENT mock_get_tasks_execution.return_value = mock_execution testutils.get_wrapped_function(self.server.confirm_task_cancellation)( @@ -5057,11 +4507,6 @@ def test_confirm_task_cancellation( if expected_advance_execution_state_call: mock_get_tasks_execution.assert_called_once_with( mock.sentinel.context, task.execution_id) - mock_get_action.assert_called_once_with( - mock.sentinel.context, mock_execution.action_id, - include_task_info=False) - mock_check_delete_reservation.assert_called_once_with( - mock_get_action.return_value) mock_advance_execution_state.assert_called_once_with( mock.sentinel.context, mock_get_tasks_execution.return_value, @@ -5110,7 +4555,7 @@ def 
test_set_task_error( ): task_status = config['task_status'] mock_get_tasks_execution.return_value = mock.Mock( - type=constants.EXECUTION_TYPE_MIGRATION, + type=constants.EXECUTION_TYPE_DEPLOYMENT, action_id=mock.sentinel.action_id, tasks=[ mock.Mock( @@ -5270,69 +4715,69 @@ def test_update_task_progress_update( new_message=mock.sentinel.new_message, ) - @mock.patch.object(db_api, "get_replica_schedule") - def test__get_replica_schedule( + @mock.patch.object(db_api, "get_transfer_schedule") + def test__get_transfer_schedule( self, - mock_get_replica_schedule + mock_get_transfer_schedule ): - result = self.server._get_replica_schedule( + result = self.server._get_transfer_schedule( mock.sentinel.context, - mock.sentinel.replica_id, + mock.sentinel.transfer_id, mock.sentinel.schedule_id, expired=True ) self.assertEqual( - mock_get_replica_schedule.return_value, + mock_get_transfer_schedule.return_value, result ) - mock_get_replica_schedule.assert_called_once_with( + mock_get_transfer_schedule.assert_called_once_with( mock.sentinel.context, - mock.sentinel.replica_id, + mock.sentinel.transfer_id, mock.sentinel.schedule_id, expired=True ) - mock_get_replica_schedule.reset_mock() - mock_get_replica_schedule.return_value = None + mock_get_transfer_schedule.reset_mock() + mock_get_transfer_schedule.return_value = None self.assertRaises( exception.NotFound, - self.server._get_replica_schedule, + self.server._get_transfer_schedule, mock.sentinel.context, - mock.sentinel.replica_id, + mock.sentinel.transfer_id, mock.sentinel.schedule_id, expired=False ) - mock_get_replica_schedule.assert_called_once_with( + mock_get_transfer_schedule.assert_called_once_with( mock.sentinel.context, - mock.sentinel.replica_id, + mock.sentinel.transfer_id, mock.sentinel.schedule_id, expired=False ) - @mock.patch.object(server.ConductorServerEndpoint, "get_replica_schedule") - @mock.patch.object(db_api, "add_replica_schedule") - @mock.patch.object(models, "ReplicaSchedule") - 
@mock.patch.object(server.ConductorServerEndpoint, "_get_replica") + @mock.patch.object(server.ConductorServerEndpoint, "get_transfer_schedule") + @mock.patch.object(db_api, "add_transfer_schedule") + @mock.patch.object(models, "TransferSchedule") + @mock.patch.object(server.ConductorServerEndpoint, "_get_transfer") @mock.patch.object(keystone, "create_trust") - def test_create_replica_schedule( + def test_create_transfer_schedule( self, mock_create_trust, - mock_get_replica, - mock_ReplicaSchedule, - mock_add_replica_schedule, - mock_get_replica_schedule + mock_get_transfer, + mock_transfer_schedule, + mock_add_transfer_schedule, + mock_get_transfer_schedule ): context = mock.Mock() - replica_schedule = mock.Mock() + transfer_schedule = mock.Mock() context.trust_id = mock.sentinel.trust_id - mock_ReplicaSchedule.return_value = replica_schedule + mock_transfer_schedule.return_value = transfer_schedule - result = self.server.create_replica_schedule( + result = self.server.create_transfer_schedule( context, - mock.sentinel.replica_id, + mock.sentinel.transfer_id, mock.sentinel.schedule, mock.sentinel.enabled, mock.sentinel.exp_date, @@ -5340,22 +4785,22 @@ def test_create_replica_schedule( ) self.assertEqual( - mock_get_replica_schedule.return_value, + mock_get_transfer_schedule.return_value, result ) self.assertEqual( ( - replica_schedule.replica, - replica_schedule.replica_id, - replica_schedule.schedule, - replica_schedule.expiration_date, - replica_schedule.enabled, - replica_schedule.shutdown_instance, - replica_schedule.trust_id + transfer_schedule.transfer, + transfer_schedule.transfer_id, + transfer_schedule.schedule, + transfer_schedule.expiration_date, + transfer_schedule.enabled, + transfer_schedule.shutdown_instance, + transfer_schedule.trust_id ), ( - mock_get_replica.return_value, - mock.sentinel.replica_id, + mock_get_transfer.return_value, + mock.sentinel.transfer_id, mock.sentinel.schedule, mock.sentinel.exp_date, mock.sentinel.enabled, @@ -5364,62 
+4809,63 @@ def test_create_replica_schedule( ) ) mock_create_trust.assert_called_once_with(context) - mock_get_replica.assert_called_once_with( + mock_get_transfer.assert_called_once_with( context, - mock.sentinel.replica_id, + mock.sentinel.transfer_id, ) - mock_ReplicaSchedule.assert_called_once() - mock_add_replica_schedule.assert_called_once_with( + mock_transfer_schedule.assert_called_once() + mock_add_transfer_schedule.assert_called_once_with( context, - replica_schedule, + transfer_schedule, mock.ANY ) - mock_get_replica_schedule.assert_called_once_with( + mock_get_transfer_schedule.assert_called_once_with( context, - mock.sentinel.replica_id, - replica_schedule.id + mock.sentinel.transfer_id, + transfer_schedule.id ) - @mock.patch.object(server.ConductorServerEndpoint, "_get_replica_schedule") - @mock.patch.object(db_api, "update_replica_schedule") - def test_update_replica_schedule( + @mock.patch.object( + server.ConductorServerEndpoint, "_get_transfer_schedule") + @mock.patch.object(db_api, "update_transfer_schedule") + def test_update_transfer_schedule( self, - mock_update_replica_schedule, - mock_get_replica_schedule + mock_update_transfer_schedule, + mock_get_transfer_schedule ): result = testutils.get_wrapped_function( - self.server.update_replica_schedule)( + self.server.update_transfer_schedule)( self.server, mock.sentinel.context, - mock.sentinel.replica_id, + mock.sentinel.transfer_id, mock.sentinel.schedule_id, mock.sentinel.updated_values, ) self.assertEqual( - mock_get_replica_schedule.return_value, + mock_get_transfer_schedule.return_value, result ) - mock_update_replica_schedule.assert_called_once_with( + mock_update_transfer_schedule.assert_called_once_with( mock.sentinel.context, - mock.sentinel.replica_id, + mock.sentinel.transfer_id, mock.sentinel.schedule_id, mock.sentinel.updated_values, None, mock.ANY ) - mock_get_replica_schedule.assert_called_once_with( + mock_get_transfer_schedule.assert_called_once_with( mock.sentinel.context, - 
mock.sentinel.replica_id, + mock.sentinel.transfer_id, mock.sentinel.schedule_id, ) @mock.patch.object(keystone, "delete_trust") @mock.patch.object(context, "get_admin_context") - @mock.patch.object(server.ConductorServerEndpoint, "_replica_cron_client") + @mock.patch.object(server.ConductorServerEndpoint, "_transfer_cron_client") def test_cleanup_schedule_resources( self, - mock_replica_cron_client, + mock_transfer_cron_client, mock_get_admin_context, mock_delete_trust, ): @@ -5431,14 +4877,14 @@ def test_cleanup_schedule_resources( schedule ) - mock_replica_cron_client.unregister.assert_called_once_with( + mock_transfer_cron_client.unregister.assert_called_once_with( mock.sentinel.context, schedule ) mock_get_admin_context.assert_not_called() mock_delete_trust.assert_not_called() - mock_replica_cron_client.reset_mock() + mock_transfer_cron_client.reset_mock() schedule.trust_id = mock.sentinel.trust_id self.server._cleanup_schedule_resources( @@ -5446,7 +4892,7 @@ def test_cleanup_schedule_resources( schedule ) - mock_replica_cron_client.unregister.assert_called_once_with( + mock_transfer_cron_client.unregister.assert_called_once_with( mock.sentinel.context, schedule ) @@ -5455,102 +4901,102 @@ def test_cleanup_schedule_resources( mock_delete_trust.assert_called_once_with( mock_get_admin_context.return_value) - @mock.patch.object(db_api, "delete_replica_schedule") - @mock.patch.object(server.ConductorServerEndpoint, "_get_replica") - def test_delete_replica_schedule( + @mock.patch.object(db_api, "delete_transfer_schedule") + @mock.patch.object(server.ConductorServerEndpoint, "_get_transfer") + def test_delete_transfer_schedule( self, - mock_get_replica, - mock_delete_replica_schedule + mock_get_transfer, + mock_delete_transfer_schedule ): - replica = mock.Mock() - replica.last_execution_status = constants.EXECUTION_STATUS_COMPLETED - mock_get_replica.return_value = replica + transfer = mock.Mock() + transfer.last_execution_status = 
constants.EXECUTION_STATUS_COMPLETED + mock_get_transfer.return_value = transfer - testutils.get_wrapped_function(self.server.delete_replica_schedule)( + testutils.get_wrapped_function(self.server.delete_transfer_schedule)( self.server, mock.sentinel.context, - mock.sentinel.replica_id, + mock.sentinel.transfer_id, mock.sentinel.schedule_id ) - mock_get_replica.assert_called_once_with( + mock_get_transfer.assert_called_once_with( mock.sentinel.context, - mock.sentinel.replica_id + mock.sentinel.transfer_id ) - mock_delete_replica_schedule.assert_called_once_with( + mock_delete_transfer_schedule.assert_called_once_with( mock.sentinel.context, - mock.sentinel.replica_id, + mock.sentinel.transfer_id, mock.sentinel.schedule_id, None, mock.ANY ) - mock_get_replica.reset_mock() - mock_delete_replica_schedule.reset_mock() - replica.last_execution_status = constants.EXECUTION_STATUS_RUNNING + mock_get_transfer.reset_mock() + mock_delete_transfer_schedule.reset_mock() + transfer.last_execution_status = constants.EXECUTION_STATUS_RUNNING self.assertRaises( - exception.InvalidReplicaState, + exception.InvalidTransferState, testutils.get_wrapped_function( - self.server.delete_replica_schedule), + self.server.delete_transfer_schedule), self.server, mock.sentinel.context, - mock.sentinel.replica_id, + mock.sentinel.transfer_id, mock.sentinel.schedule_id ) - mock_get_replica.assert_called_once_with( + mock_get_transfer.assert_called_once_with( mock.sentinel.context, - mock.sentinel.replica_id + mock.sentinel.transfer_id ) - mock_delete_replica_schedule.assert_not_called() + mock_delete_transfer_schedule.assert_not_called() - @mock.patch.object(db_api, "get_replica_schedules") - def test_get_replica_schedules(self, mock_get_replica_schedules): + @mock.patch.object(db_api, "get_transfer_schedules") + def test_get_transfer_schedules(self, mock_get_transfer_schedules): result = testutils.get_wrapped_function( - self.server.get_replica_schedules)( + 
self.server.get_transfer_schedules)( self.server, mock.sentinel.context, - replica_id=None, + transfer_id=None, expired=True ) self.assertEqual( - mock_get_replica_schedules.return_value, + mock_get_transfer_schedules.return_value, result ) - mock_get_replica_schedules.assert_called_once_with( + mock_get_transfer_schedules.assert_called_once_with( mock.sentinel.context, - replica_id=None, + transfer_id=None, expired=True ) - @mock.patch.object(db_api, "get_replica_schedule") - def test_get_replica_schedule(self, mock_get_replica_schedule): + @mock.patch.object(db_api, "get_transfer_schedule") + def test_get_transfer_schedule(self, mock_get_transfer_schedule): result = testutils.get_wrapped_function( - self.server.get_replica_schedule)( + self.server.get_transfer_schedule)( self.server, mock.sentinel.context, - mock.sentinel.replica_id, + mock.sentinel.transfer_id, mock.sentinel.schedule_id, expired=True ) self.assertEqual( - mock_get_replica_schedule.return_value, + mock_get_transfer_schedule.return_value, result ) - mock_get_replica_schedule.assert_called_once_with( + mock_get_transfer_schedule.assert_called_once_with( mock.sentinel.context, - mock.sentinel.replica_id, + mock.sentinel.transfer_id, mock.sentinel.schedule_id, expired=True ) @mock.patch.object(server.ConductorServerEndpoint, - "get_replica_tasks_execution") + "get_transfer_tasks_execution") @mock.patch.object(server.ConductorServerEndpoint, "_begin_tasks") - @mock.patch.object(db_api, "add_replica_tasks_execution") + @mock.patch.object(db_api, "add_transfer_tasks_execution") @mock.patch.object(db_api, "update_transfer_action_info_for_instance") @mock.patch.object(server.ConductorServerEndpoint, "_check_execution_tasks_sanity") @@ -5558,88 +5004,88 @@ def test_get_replica_schedule(self, mock_get_replica_schedule): @mock.patch.object(utils, "sanitize_task_info") @mock.patch.object(models, "TasksExecution") @mock.patch.object(server.ConductorServerEndpoint, - "_check_valid_replica_tasks_execution") + 
"_check_valid_transfer_tasks_execution") @mock.patch.object(server.ConductorServerEndpoint, - "_check_replica_running_executions") + "_check_transfer_running_executions") @mock.patch.object(server.ConductorServerEndpoint, "_check_minion_pools_for_action") - @mock.patch.object(models, "Replica") - @mock.patch.object(server.ConductorServerEndpoint, "_get_replica") - @ddt.file_data("data/update_replica_config.yml") + @mock.patch.object(models, "Transfer") + @mock.patch.object(server.ConductorServerEndpoint, "_get_transfer") + @ddt.file_data("data/update_transfer_config.yml") @ddt.unpack - def test_update_replica( + def test_update_transfer( self, - mock_get_replica, - mock_Replica, + mock_get_transfer, + mock_transfer, mock_check_minion_pools_for_action, - mock_check_replica_running_executions, - mock_check_valid_replica_tasks_execution, + mock_check_transfer_running_executions, + mock_check_valid_transfer_tasks_execution, mock_TasksExecution, mock_sanitize_task_info, mock_create_task, mock_check_execution_tasks_sanity, mock_update_transfer_action_info_for_instance, - mock_add_replica_tasks_execution, + mock_add_transfer_tasks_execution, mock_begin_tasks, - mock_get_replica_tasks_execution, + mock_get_transfer_tasks_execution, config, has_updated_values, - has_replica_instance + has_transfer_instance ): - replica = mock.Mock() + transfer = mock.Mock() dummy = mock.Mock() execution = mock.Mock() - replica.instances = config['replica'].get("instances", []) - replica.info = config['replica'].get("info", {}) - mock_get_replica.return_value = replica - mock_Replica.return_value = dummy + transfer.instances = config['transfer'].get("instances", []) + transfer.info = config['transfer'].get("info", {}) + mock_get_transfer.return_value = transfer + mock_transfer.return_value = dummy mock_TasksExecution.return_value = execution updated_properties = config.get("updated_properties", {}) - result = testutils.get_wrapped_function(self.server.update_replica)( + result = 
testutils.get_wrapped_function(self.server.update_transfer)( self.server, mock.sentinel.context, - mock.sentinel.replica_id, + mock.sentinel.transfer_id, updated_properties ) self.assertEqual( - mock_get_replica_tasks_execution.return_value, + mock_get_transfer_tasks_execution.return_value, result ) - mock_get_replica.assert_called_once_with( + mock_get_transfer.assert_called_once_with( mock.sentinel.context, - mock.sentinel.replica_id, + mock.sentinel.transfer_id, include_task_info=True ) - mock_check_replica_running_executions.assert_called_once_with( + mock_check_transfer_running_executions.assert_called_once_with( mock.sentinel.context, - mock_get_replica.return_value, + mock_get_transfer.return_value, ) - mock_check_valid_replica_tasks_execution.assert_called_once_with( - mock_get_replica.return_value, + mock_check_valid_transfer_tasks_execution.assert_called_once_with( + mock_get_transfer.return_value, force=True, ) self.assertEqual( execution.action, - mock_get_replica.return_value + mock_get_transfer.return_value ) mock_check_execution_tasks_sanity.assert_called_once_with( execution, - replica.info + transfer.info ) - mock_add_replica_tasks_execution.assert_called_once_with( + mock_add_transfer_tasks_execution.assert_called_once_with( mock.sentinel.context, execution ) mock_begin_tasks.assert_called_once_with( mock.sentinel.context, - replica, + transfer, execution ) - mock_get_replica_tasks_execution.assert_called_once_with( + mock_get_transfer_tasks_execution.assert_called_once_with( mock.sentinel.context, - mock.sentinel.replica_id, + mock.sentinel.transfer_id, execution.id ) if has_updated_values: @@ -5649,34 +5095,34 @@ def test_update_replica( ) else: mock_check_minion_pools_for_action.assert_not_called() - if has_replica_instance: + if has_transfer_instance: expected_sanitize_task_info_calls = [] create_task_calls = [] update_transfer_action_info_for_instance_calls = [] - for instance in config['replica'].get("info", {}): + for instance in 
config['transfer'].get("info", {}): expected_sanitize_task_info_calls.append( mock.call(mock.ANY)) expected_sanitize_task_info_calls.append( - mock.call(replica.info[instance])) + mock.call(transfer.info[instance])) create_task_calls.append(mock.call( instance, constants.TASK_TYPE_GET_INSTANCE_INFO, execution)) create_task_calls.append(mock.call( instance, - constants.TASK_TYPE_UPDATE_SOURCE_REPLICA, + constants.TASK_TYPE_UPDATE_SOURCE_TRANSFER, execution)) create_task_calls.append(mock.call( instance, - constants.TASK_TYPE_UPDATE_DESTINATION_REPLICA, + constants.TASK_TYPE_UPDATE_DESTINATION_TRANSFER, execution, depends_on=mock.ANY)) update_transfer_action_info_for_instance_calls.append( mock.call( mock.sentinel.context, - replica.id, + transfer.id, instance, - replica.info[instance]) + transfer.info[instance]) ) mock_sanitize_task_info.assert_has_calls( expected_sanitize_task_info_calls) @@ -6132,7 +5578,7 @@ def test_set_task_error_os_morphing( mock_conf_conductor, ): execution = mock.Mock( - type=constants.EXECUTION_TYPE_REPLICA_UPDATE, + type=constants.EXECUTION_TYPE_TRANSFER_UPDATE, action_id=mock.sentinel.action_id, tasks=[ mock.Mock( @@ -6179,14 +5625,11 @@ def test_set_task_error_os_morphing( mock_get_tasks_execution.return_value, ) - # migration execution + # deployment execution mock_check_delete_reservation_for_transfer.assert_not_called() - execution.type = constants.EXECUTION_TYPE_MIGRATION + execution.type = constants.EXECUTION_TYPE_DEPLOYMENT self.server.set_task_error( mock.sentinel.context, mock.sentinel.task_id, mock.sentinel.exception_details, ) - mock_check_delete_reservation_for_transfer.assert_called_once_with( - mock_get_action.return_value, - ) diff --git a/coriolis/tests/db/sqlalchemy/test_api.py b/coriolis/tests/db/sqlalchemy/test_api.py index 2838dc29c..de79b5d86 100644 --- a/coriolis/tests/db/sqlalchemy/test_api.py +++ b/coriolis/tests/db/sqlalchemy/test_api.py @@ -15,6 +15,10 @@ class 
DatabaseSqlalchemyApiTestCase(test_base.CoriolisBaseTestCase): """Test suite for the Coriolis Database Sqlalchemy api.""" + def tearDown(self): + api._facade = None + super(DatabaseSqlalchemyApiTestCase, self).tearDown() + @mock.patch.object(db_session, 'EngineFacade') def test_get_facade_none(self, mock_EngineFacade): cfg.CONF.database.connection = mock.sentinel.connection diff --git a/coriolis/tests/db/sqlalchemy/test_models.py b/coriolis/tests/db/sqlalchemy/test_models.py index faee2c0b1..b22091597 100644 --- a/coriolis/tests/db/sqlalchemy/test_models.py +++ b/coriolis/tests/db/sqlalchemy/test_models.py @@ -324,10 +324,10 @@ class ReplicaTestCase(test_base.CoriolisBaseTestCase): """Test suite for the Coriolis Database Sqlalchemy Replica.""" def test_to_dict(self): - replica = models.Replica() - replica.id = mock.sentinel.id + transfer = models.Transfer() + transfer.id = mock.sentinel.id - result = replica.to_dict() + result = transfer.to_dict() self.assertEqual( mock.sentinel.id, @@ -335,23 +335,24 @@ def test_to_dict(self): ) -class MigrationTestCase(test_base.CoriolisBaseTestCase): - """Test suite for the Coriolis Database Sqlalchemy Migration.""" +class DeploymentTestCase(test_base.CoriolisBaseTestCase): + """Test suite for the Coriolis Database Sqlalchemy Deployment.""" def test_to_dict(self): - migration = models.Migration() - migration.id = mock.sentinel.id - migration.replica_id = mock.sentinel.replica_id - migration.shutdown_instances = mock.sentinel.shutdown_instances - migration.replication_count = mock.sentinel.replication_count + transfer = mock.MagicMock(id=mock.sentinel.transfer_id) + deployment = models.Deployment() + deployment.id = mock.sentinel.id + deployment.transfer_id = transfer.id + deployment.transfer = transfer + deployment.shutdown_instances = mock.sentinel.shutdown_instances expected_result = { "id": mock.sentinel.id, - "replica_id": mock.sentinel.replica_id, + "transfer_id": mock.sentinel.transfer_id, "shutdown_instances": 
mock.sentinel.shutdown_instances, - "replication_count": mock.sentinel.replication_count, + "transfer_scenario_type": transfer.scenario, } - result = migration.to_dict() + result = deployment.to_dict() assert all(item in result.items() for item in expected_result.items()) diff --git a/coriolis/tests/db/test_api.py b/coriolis/tests/db/test_api.py index 9c3980ec4..33e90c2dc 100644 --- a/coriolis/tests/db/test_api.py +++ b/coriolis/tests/db/test_api.py @@ -1,29 +1,1111 @@ # Copyright 2017 Cloudbase Solutions Srl # All Rights Reserved. - +import datetime from unittest import mock +import uuid + +import ddt +from oslo_utils import timeutils +import sqlalchemy +from coriolis import constants from coriolis.db import api +from coriolis.db.sqlalchemy import api as sqlalchemy_api +from coriolis.db.sqlalchemy import models from coriolis import exception from coriolis.tests import test_base from coriolis.tests import testutils +CONTEXT_MOCK = mock.MagicMock() +DEFAULT_INSTANCE = "instance1" +DEFAULT_USER_ID = "1" +DEFAULT_PROJECT_ID = "1" +DEFAULT_TASK_INFO = {DEFAULT_INSTANCE: {"volumes_info": []}} +DEFAULT_EXECUTION_STATUS = constants.EXECUTION_STATUS_RUNNING + + +def get_valid_endpoint( + endpoint_id=None, user_id=DEFAULT_USER_ID, + project_id=DEFAULT_PROJECT_ID, connection_info=None, + endpoint_type="openstack", name="test_name", + description="Endpoint Description"): + if endpoint_id is None: + endpoint_id = str(uuid.uuid4()) + if connection_info is None: + connection_info = {"conn_info": {"secret": "info"}} + + endpoint = models.Endpoint() + endpoint.id = endpoint_id + endpoint.user_id = user_id + endpoint.project_id = project_id + endpoint.connection_info = connection_info + endpoint.type = endpoint_type + endpoint.name = name + endpoint.description = description + + return endpoint + + +def create_valid_tasks_execution(): + valid_tasks_execution = models.TasksExecution() + valid_tasks_execution.id = str(uuid.uuid4()) + valid_tasks_execution.status = 
DEFAULT_EXECUTION_STATUS + valid_tasks_execution.type = constants.EXECUTION_TYPE_TRANSFER_EXECUTION + valid_tasks_execution.number = 1 + + valid_task = models.Task() + valid_task.id = str(uuid.uuid4()) + valid_task.execution = valid_tasks_execution + valid_task.instance = DEFAULT_INSTANCE + valid_task.status = constants.TASK_STATUS_RUNNING + valid_task.task_type = ( + constants.TASK_TYPE_VALIDATE_TRANSFER_SOURCE_INPUTS) + valid_task.index = 1 + valid_task.on_error = False + + valid_progress_update = models.TaskProgressUpdate() + valid_progress_update.id = str(uuid.uuid4()) + valid_progress_update.task = valid_task + valid_progress_update.index = 1 + valid_progress_update.current_step = 0 + + valid_task_event = models.TaskEvent() + valid_task_event.id = str(uuid.uuid4()) + valid_task_event.task = valid_task + valid_task_event.level = constants.TASK_EVENT_INFO + valid_task_event.index = 1 + valid_task_event.message = "event message test" + return valid_tasks_execution + + +class BaseDBAPITestCase(test_base.CoriolisBaseTestCase): + + valid_data = { + "user_scope": {}, + "outer_scope": {} + } + + @classmethod + def setup_scoped_data(cls, region_id, project_id="1"): + data = dict() + valid_endpoint_source = get_valid_endpoint( + endpoint_type='vmware', project_id=project_id) + cls.session.add(valid_endpoint_source) + data['source_endpoint'] = valid_endpoint_source + valid_endpoint_destination = get_valid_endpoint( + endpoint_type='openstack', project_id=project_id) + cls.session.add(valid_endpoint_destination) + data['destination_endpoint'] = valid_endpoint_destination + + valid_endpoint_region_mapping = models.EndpointRegionMapping() + valid_endpoint_region_mapping.id = str(uuid.uuid4()) + valid_endpoint_region_mapping.endpoint_id = valid_endpoint_source.id + valid_endpoint_region_mapping.region_id = region_id + cls.session.add(valid_endpoint_region_mapping) + data['endpoint_mapping'] = valid_endpoint_region_mapping + + valid_transfer = models.Transfer() + 
valid_transfer.id = str(uuid.uuid4()) + valid_transfer.user_id = project_id + valid_transfer.project_id = project_id + valid_transfer.base_id = valid_transfer.id + valid_transfer.scenario = constants.TRANSFER_SCENARIO_REPLICA + valid_transfer.last_execution_status = DEFAULT_EXECUTION_STATUS + valid_transfer.executions = [] + valid_transfer.instances = [DEFAULT_INSTANCE] + valid_transfer.info = DEFAULT_TASK_INFO + valid_transfer.origin_endpoint_id = valid_endpoint_source.id + valid_transfer.destination_endpoint_id = valid_endpoint_destination.id + cls.session.add(valid_transfer) + data['transfer'] = valid_transfer + + valid_tasks_execution = create_valid_tasks_execution() + valid_tasks_execution.action = valid_transfer + cls.session.add(valid_tasks_execution) + data['tasks_execution'] = valid_tasks_execution + data['task'] = valid_tasks_execution.tasks[0] + + valid_transfer_schedule = models.TransferSchedule() + valid_transfer_schedule.id = str(uuid.uuid4()) + valid_transfer_schedule.transfer = valid_transfer + valid_transfer_schedule.schedule = {} + valid_transfer_schedule.expiration_date = timeutils.utcnow() + valid_transfer_schedule.enabled = True + valid_transfer_schedule.shutdown_instance = False + valid_transfer_schedule.trust_id = str(uuid.uuid4()) + cls.session.add(valid_transfer_schedule) + data['transfer_schedule'] = valid_transfer_schedule + + valid_deployment = models.Deployment() + valid_deployment.id = str(uuid.uuid4()) + valid_deployment.user_id = project_id + valid_deployment.project_id = project_id + valid_deployment.base_id = valid_deployment.id + valid_deployment.last_execution_status = DEFAULT_EXECUTION_STATUS + valid_deployment.instances = [DEFAULT_INSTANCE] + valid_deployment.info = DEFAULT_TASK_INFO + valid_deployment.origin_endpoint_id = valid_endpoint_source.id + valid_deployment.destination_endpoint_id = ( + valid_endpoint_destination.id) + valid_deployment.transfer = valid_transfer + + deployment_execution = create_valid_tasks_execution() 
+ deployment_execution.action = valid_deployment + cls.session.add(valid_deployment) + data['deployment'] = valid_deployment + data['deployment_execution'] = deployment_execution + + return data + + @classmethod + def setup_database_data(cls): + cls.valid_region = models.Region() + cls.valid_region.id = str(uuid.uuid4()) + cls.valid_region.name = "region1" + cls.valid_region.enabled = True + cls.session.add(cls.valid_region) + + cls.valid_data['user_scope'] = cls.setup_scoped_data( + cls.valid_region.id) + cls.valid_data['outer_scope'] = cls.setup_scoped_data( + cls.valid_region.id, project_id="2") + cls.session.commit() + + @classmethod + def setUpClass(cls): + super(BaseDBAPITestCase, cls).setUpClass() + with mock.patch.object(sqlalchemy_api, 'CONF') as mock_conf: + mock_conf.database.connection = "sqlite://" + engine = api.get_engine() + models.BASE.metadata.create_all(engine) + cls.session = api.get_session() + cls.setup_database_data() + + def setUp(self): + super(BaseDBAPITestCase, self).setUp() + self.context = CONTEXT_MOCK + self.context.session = self.session + self.context.show_deleted = False + self.context.user = DEFAULT_USER_ID + self.context.project_id = DEFAULT_PROJECT_ID + self.context.is_admin = False + + def tearDown(self): + self.context.reset_mock() + super(BaseDBAPITestCase, self).tearDown() + + @classmethod + def tearDownClass(cls): + cls.session.rollback() + cls.session.close() + super(BaseDBAPITestCase, cls).tearDownClass() + + +@ddt.ddt +class DBAPITestCase(BaseDBAPITestCase): + """Test suite for the common Coriolis DB API.""" + + def test_get_engine(self): + self.assertEqual(api.get_engine(), api.IMPL.get_engine()) + + def test_get_session(self): + self.assertIsInstance(api.get_session(), sqlalchemy.orm.Session) + + @mock.patch.object(api, 'IMPL') + def test_db_sync(self, mock_impl): + self.assertEqual( + api.db_sync(mock.sentinel.engine, version=mock.sentinel.version), + mock_impl.db_sync.return_value) + 
mock_impl.db_sync.assert_called_once_with( + mock.sentinel.engine, version=mock.sentinel.version) + + @mock.patch.object(api, 'IMPL') + def test_db_version(self, mock_impl): + self.assertEqual( + api.db_version(mock.sentinel.engine), + mock_impl.db_version.return_value) + mock_impl.db_version.assert_called_once_with(mock.sentinel.engine) + + def test__session(self): + self.assertEqual(api._session(self.context), self.context.session) + + @mock.patch.object(api, 'get_session') + def test__session_no_context(self, mock_get_session): + self.assertEqual( + api._session(None), + mock_get_session.return_value) + + @mock.patch.object(api, 'get_session') + def test__session_sessionless_context(self, mock_get_session): + context = mock.Mock(session=None) + self.assertEqual( + api._session(context), + mock_get_session.return_value) + + @ddt.data( + {"kwargs": None, "expected_result": False}, + {"kwargs": {}, "expected_result": False}, + {"kwargs": {"user_id": None}, "expected_result": False}, + {"kwargs": {"user_id": "1", "project_id": None}, + "expected_result": False}, + {"kwargs": {"user_id": "1", "project_id": "1", "is_admin": True}, + "expected_result": False}, + {"kwargs": {"user_id": "1", "project_id": "1", "is_admin": False}, + "expected_result": True}, + ) + def test_is_user_context(self, data): + kwargs = data.get('kwargs') + if kwargs is None: + context = None + else: + context = mock.Mock(**data.get('kwargs', {})) + self.assertEqual( + api.is_user_context(context), data.get('expected_result')) + + @mock.patch.object(api, '_session') + def test__model_query(self, mock_session): + self.assertEqual( + api._model_query(mock.sentinel.context, mock.sentinel.model), + mock_session.return_value.query.return_value) + mock_session.assert_called_once_with( + mock.sentinel.context) + mock_session.return_value.query.assert_called_once_with( + mock.sentinel.model) + + def test__update_sqlalchemy_object_fields_non_dict_values(self): + self.assertRaises( + 
exception.InvalidInput, api._update_sqlalchemy_object_fields, + mock.ANY, mock.ANY, None) + + def test__update_sqlalchemy_object_fields_conflict(self): + updateable_fields = ["field1", "field2"] + values_to_update = {"field1": "value1", "field3": "value3"} + self.assertRaises( + exception.Conflict, api._update_sqlalchemy_object_fields, + mock.ANY, updateable_fields, values_to_update) + + def test__update_sqlalchemy_object_fields_invalid_obj_field(self): + self.assertRaises( + exception.InvalidInput, api._update_sqlalchemy_object_fields, + models.Endpoint, ["invalid_field"], {"invalid_field": "new_value"}) + + def test__update_sqlalchemy_object_fields(self): + obj = models.Endpoint() + obj.description = "initial test description" + new_description = "updated test description" + + api._update_sqlalchemy_object_fields( + obj, ["description"], {"description": new_description}) + self.assertEqual(obj.description, new_description) + + def test__soft_delete_aware_query_show_deleted_kwarg(self): + valid_endpoint = get_valid_endpoint() + self.session.add(valid_endpoint) + self.session.commit() + + testutils.get_wrapped_function(api.delete_endpoint)( + self.context, valid_endpoint.id) + self.context.show_deleted = False + result = api._soft_delete_aware_query( + self.context, models.Endpoint, show_deleted=True).filter( + models.Endpoint.id == valid_endpoint.id).first() + self.assertEqual(result.id, valid_endpoint.id) + self.assertIsNotNone(result.deleted_at) + + def test__soft_delete_aware_query_context_show_deleted(self): + valid_endpoint = get_valid_endpoint() + self.session.add(valid_endpoint) + self.session.commit() + + testutils.get_wrapped_function(api.delete_endpoint)( + self.context, valid_endpoint.id) + self.context.show_deleted = True + result = api._soft_delete_aware_query( + self.context, models.Endpoint).filter( + models.Endpoint.id == valid_endpoint.id).first() + self.assertEqual(result.id, valid_endpoint.id) + self.assertIsNotNone(result.deleted_at) + + +class 
EndpointDBAPITestCase(BaseDBAPITestCase): + + @classmethod + def setUpClass(cls): + super(EndpointDBAPITestCase, cls).setUpClass() + cls.valid_endpoint_source = cls.valid_data['user_scope'].get( + 'source_endpoint') + cls.valid_endpoint_region_mapping = cls.valid_data['user_scope'].get( + 'endpoint_mapping') + cls.outer_scope_endpoint = cls.valid_data['outer_scope'].get( + 'source_endpoint') + + def test_get_endpoints(self): + result = api.get_endpoints(self.context) + self.assertIn(self.valid_endpoint_source, result) + + def test_get_endpoints_admin(self): + self.context.is_admin = True + result = api.get_endpoints(self.context) + self.assertIn(self.outer_scope_endpoint, result) + + def test_get_endpoints_out_of_user_scope(self): + result = api.get_endpoints(self.context) + self.assertNotIn(self.outer_scope_endpoint, result) + + def test_get_endpoint(self): + result = api.get_endpoint(self.context, self.valid_endpoint_source.id) + self.assertEqual(result, self.valid_endpoint_source) + + def test_get_endpoint_admin_context(self): + self.context.is_admin = True + result = api.get_endpoint(self.context, self.outer_scope_endpoint.id) + self.assertEqual(result, self.outer_scope_endpoint) + + def test_get_endpoint_out_of_user_scope(self): + result = api.get_endpoint(self.context, self.outer_scope_endpoint.id) + self.assertIsNone(result) + + def test_add_endpoint(self): + self.context.user = "2" + self.context.project_id = "2" + new_endpoint_id = str(uuid.uuid4()) + new_endpoint = get_valid_endpoint( + endpoint_id=new_endpoint_id, + connection_info={"conn_info": {"new": "info"}}, + endpoint_type="vmware", name="new_endpoint", + description="New Endpoint") + api.add_endpoint(self.context, new_endpoint) + result = api.get_endpoint(self.context, new_endpoint_id) + self.assertEqual(result, new_endpoint) + + def test_update_endpoint_not_found(self): + self.assertRaises( + exception.NotFound, api.update_endpoint, + self.context, "invalid_id", mock.ANY) + + def 
test_update_endpoint_invalid_values(self): + self.assertRaises( + exception.InvalidInput, api.update_endpoint, + self.context, self.valid_endpoint_source.id, None) + + def test_update_endpoint_invalid_column(self): + self.assertRaises( + exception.Conflict, api.update_endpoint, + self.context, self.valid_endpoint_source.id, {"type": "openstack"}) + + def test_update_endpoint_region_not_found(self): + self.assertRaises( + exception.NotFound, api.update_endpoint, self.context, + self.valid_endpoint_source.id, + {"mapped_regions": ["invalid_region_id"]}) + + def test_update_endpoint(self): + new_region_id = str(uuid.uuid4()) + new_endpoint_name = "new_name" + new_region = models.Region() + new_region.id = new_region_id + new_region.name = "new_region" + new_region.enabled = True + self.session.add(new_region) + self.session.commit() + + api.update_endpoint( + self.context, self.valid_endpoint_source.id, + {"mapped_regions": [new_region_id], "name": new_endpoint_name}) + result = api.get_endpoint(self.context, self.valid_endpoint_source.id) + old_endpoint_region_mapping = api.get_endpoint_region_mapping( + self.context, self.valid_endpoint_source.id, self.valid_region.id) + new_endpoint_region_mapping = api.get_endpoint_region_mapping( + self.context, self.valid_endpoint_source.id, new_region_id)[0] + self.assertEqual(result.name, new_endpoint_name) + self.assertEqual(old_endpoint_region_mapping, []) + self.assertEqual(new_endpoint_region_mapping.region_id, new_region_id) + self.assertEqual( + new_endpoint_region_mapping.endpoint_id, + self.valid_endpoint_source.id) + + @mock.patch.object(api, 'delete_endpoint_region_mapping') + @mock.patch.object(api, 'add_endpoint_region_mapping') + @mock.patch.object(api, 'get_region') + @mock.patch.object(api, '_update_sqlalchemy_object_fields') + def test_update_endpoint_remapping_failure( + self, mock_update_obj, mock_get_region, mock_add_mapping, + mock_delete_mapping): + mock_add_mapping.side_effect = [Exception, None] + + 
self.assertRaises( + Exception, api.update_endpoint, + self.context, self.valid_endpoint_source.id, + {"mapped_regions": [mock.sentinel.region_id]}) + mock_get_region.assert_called_with( + self.context, mock.sentinel.region_id) + + mock_delete_mapping.side_effect = Exception + mock_update_obj.side_effect = Exception + self.assertRaises( + Exception, api.update_endpoint, self.context, + self.valid_endpoint_source.id, + {"mapped_regions": [mock.sentinel.region_id]}) + + def test_delete_endpoint(self): + new_endpoint = get_valid_endpoint() + new_endpoint_id = new_endpoint.id + new_endpoint_region_mapping = self.valid_endpoint_region_mapping + new_endpoint_region_mapping.endpoint_id = new_endpoint_id + api.add_endpoint(self.context, new_endpoint) + + api.delete_endpoint(self.context, new_endpoint_id) + result = api.get_endpoint(self.context, new_endpoint_id) + mappings = api.get_endpoint_region_mapping( + self.context, new_endpoint_id, self.valid_region.id) + self.assertIsNone(result) + self.assertEqual(mappings, []) + + def test_delete_endpoint_not_found(self): + self.assertRaises( + exception.NotFound, api.delete_endpoint, self.context, "no_id") + + def test_delete_endpoint_admin_context(self): + self.context.is_admin = True + self.context.show_deleted = True + new_outer_scope_endpoint = get_valid_endpoint() + new_outer_scope_endpoint.user_id = "3" + new_outer_scope_endpoint.project_id = "3" + api.add_endpoint(self.context, new_outer_scope_endpoint) + + api.delete_endpoint( + self.context, new_outer_scope_endpoint.id) + result = api.get_endpoint(self.context, new_outer_scope_endpoint.id) + self.assertIsNotNone(result.deleted_at) + + def test_delete_endpoint_out_of_user_scope(self): + new_outer_scope_endpoint = get_valid_endpoint( + user_id="3", project_id="3") + self.session.add(new_outer_scope_endpoint) + self.session.commit() + + self.assertRaises( + exception.NotFound, api.delete_endpoint, self.context, + new_outer_scope_endpoint.id) + + +class 
TransferTasksExecutionDBAPITestCase(BaseDBAPITestCase): + + @classmethod + def setUpClass(cls): + super(TransferTasksExecutionDBAPITestCase, cls).setUpClass() + cls.valid_transfer = cls.valid_data['user_scope'].get('transfer') + cls.valid_task = cls.valid_data['user_scope'].get('task') + cls.valid_tasks_execution = cls.valid_data['user_scope'].get( + 'tasks_execution') + cls.outer_scope_transfer = cls.valid_data['outer_scope'].get( + 'transfer') + cls.outer_scope_tasks_execution = cls.valid_data['outer_scope'].get( + "tasks_execution") + + def setUp(self): + super(TransferTasksExecutionDBAPITestCase, self).setUp() + self.outer_scope_tasks_execution.status = DEFAULT_EXECUTION_STATUS + self.valid_tasks_execution.status = DEFAULT_EXECUTION_STATUS + + @staticmethod + def _create_dummy_execution(action): + new_tasks_execution = models.TasksExecution() + new_tasks_execution.id = str(uuid.uuid4()) + new_tasks_execution.action = action + new_tasks_execution.status = constants.EXECUTION_STATUS_UNEXECUTED + new_tasks_execution.type = constants.EXECUTION_TYPE_TRANSFER_EXECUTION + new_tasks_execution.number = 0 + + return new_tasks_execution + + def test_get_transfer_tasks_executions_include_info(self): + result = api.get_transfer_tasks_executions( + self.context, self.valid_transfer.id, include_task_info=True) + self.assertTrue(hasattr(result[0].action, 'info')) + + def test_get_transfer_tasks_executions_include_tasks(self): + result = api.get_transfer_tasks_executions( + self.context, self.valid_transfer.id, include_tasks=True) + tasks = [] + for e in result: + tasks.extend(e.tasks) + + self.assertIn(self.valid_task, tasks) + + def test_get_transfer_tasks_executions_to_dict(self): + result = api.get_transfer_tasks_executions( + self.context, self.valid_transfer.id, to_dict=True) + execution_ids = [e['id'] for e in result] + self.assertIn(self.valid_tasks_execution.id, execution_ids) + + def test_get_transfer_tasks_executions(self): + result = 
api.get_transfer_tasks_executions( + self.context, self.valid_transfer.id) + self.assertIn(self.valid_tasks_execution, result) + + def test_get_transfer_tasks_executions_admin(self): + self.context.is_admin = True + result = api.get_transfer_tasks_executions( + self.context, self.outer_scope_transfer.id) + self.assertIn(self.outer_scope_tasks_execution, result) + + def test_get_transfer_tasks_execution_out_of_user_scope(self): + result = api.get_transfer_tasks_executions( + self.context, self.outer_scope_transfer.id) + self.assertEqual(result, []) + + def test_get_transfer_tasks_execution(self): + result = api.get_transfer_tasks_execution( + self.context, self.valid_transfer.id, + self.valid_tasks_execution.id) + self.assertEqual(result, self.valid_tasks_execution) + + def test_get_transfer_tasks_execution_admin(self): + self.context.is_admin = True + result = api.get_transfer_tasks_execution( + self.context, self.outer_scope_transfer.id, + self.outer_scope_tasks_execution.id) + self.assertEqual(result, self.outer_scope_tasks_execution) + + def test_get_transfer_tasks_execution_out_of_user_context(self): + result = api.get_transfer_tasks_execution( + self.context, self.outer_scope_transfer.id, + self.outer_scope_tasks_execution.id) + self.assertIsNone(result) + + def test_get_transfer_tasks_execution_include_task_info(self): + result = api.get_transfer_tasks_execution( + self.context, self.valid_transfer.id, + self.valid_tasks_execution.id, include_task_info=True) + self.assertTrue(hasattr(result.action, 'info')) + + def test_get_transfer_tasks_execution_to_dict(self): + result = api.get_transfer_tasks_execution( + self.context, self.valid_transfer.id, + self.valid_tasks_execution.id, to_dict=True) + self.assertEqual(result['id'], self.valid_tasks_execution.id) + + def test_add_transfer_tasks_execution(self): + new_tasks_execution = self._create_dummy_execution(self.valid_transfer) + + api.add_transfer_tasks_execution(self.context, new_tasks_execution) + result = 
api.get_transfer_tasks_execution( + self.context, self.valid_transfer.id, new_tasks_execution.id) + self.assertEqual(new_tasks_execution, result) + self.assertGreater(result.number, 0) + + def test_add_transfer_tasks_execution_admin(self): + self.context.is_admin = True + new_tasks_execution = self._create_dummy_execution( + self.outer_scope_transfer) + api.add_transfer_tasks_execution(self.context, new_tasks_execution) + result = api.get_transfer_tasks_execution( + self.context, self.outer_scope_transfer.id, new_tasks_execution.id) + self.assertEqual(new_tasks_execution, result) + + def test_add_transfer_tasks_execution_out_of_user_context(self): + new_tasks_execution = self._create_dummy_execution( + self.outer_scope_transfer) + self.assertRaises( + exception.NotAuthorized, api.add_transfer_tasks_execution, + self.context, new_tasks_execution) + + def test_delete_transfer_tasks_execution(self): + new_tasks_execution = self._create_dummy_execution(self.valid_transfer) + api.add_transfer_tasks_execution(self.context, new_tasks_execution) + api.delete_transfer_tasks_execution( + self.context, new_tasks_execution.id) + result = api.get_transfer_tasks_execution( + self.context, self.valid_transfer.id, new_tasks_execution.id) + self.assertIsNone(result) + + def test_delete_transfer_tasks_execution_admin(self): + self.context.is_admin = True + new_tasks_execution = self._create_dummy_execution( + self.outer_scope_transfer) + api.add_transfer_tasks_execution(self.context, new_tasks_execution) + api.delete_transfer_tasks_execution( + self.context, new_tasks_execution.id) + result = api.get_transfer_tasks_execution( + self.context, self.outer_scope_transfer.id, new_tasks_execution.id) + self.assertIsNone(result) + + def test_delete_transfer_tasks_execution_out_of_user_scope(self): + self.context.is_admin = True + new_tasks_execution = self._create_dummy_execution( + self.outer_scope_transfer) + api.add_transfer_tasks_execution(self.context, new_tasks_execution) + + 
self.context.is_admin = False + self.assertRaises( + exception.NotAuthorized, api.delete_transfer_tasks_execution, + self.context, new_tasks_execution.id) + + def test_delete_transfer_tasks_execution_not_found(self): + self.context.is_admin = True + self.assertRaises( + exception.NotFound, api.delete_transfer_tasks_execution, + self.context, "invalid_id") + + def test_set_execution_status_admin(self): + self.context.is_admin = True + new_status = constants.EXECUTION_STATUS_COMPLETED + result = api.set_execution_status( + self.context, self.outer_scope_tasks_execution.id, new_status, + update_action_status=False) + self.assertEqual(result.status, new_status) + + def test_set_execution_status_out_of_user_scope(self): + self.assertRaises( + exception.NotFound, api.set_execution_status, self.context, + self.outer_scope_tasks_execution.id, mock.ANY, + update_action_status=False) + + def test_set_execution_status_not_found(self): + self.assertRaises( + exception.NotFound, api.set_execution_status, self.context, + "invalid_id", mock.ANY, + update_action_status=False) + + def test_set_execution_status_update_action_status(self): + new_status = constants.EXECUTION_STATUS_COMPLETED + api.set_execution_status( + self.context, self.valid_tasks_execution.id, new_status) + self.assertEqual(self.valid_transfer.last_execution_status, new_status) + + +class TransferSchedulesDBAPITestCase(BaseDBAPITestCase): + + @classmethod + def setUpClass(cls): + super(TransferSchedulesDBAPITestCase, cls).setUpClass() + cls.valid_transfer_schedule = cls.valid_data['user_scope'].get( + 'transfer_schedule') + cls.valid_transfer = cls.valid_data['user_scope'].get('transfer') + cls.outer_scope_transfer_schedule = cls.valid_data['outer_scope'].get( + 'transfer_schedule') + cls.outer_scope_transfer = cls.valid_data['outer_scope'].get( + 'transfer') + + @staticmethod + def _create_dummy_transfer_schedule(transfer, expiration_date): + ts = models.TransferSchedule() + ts.id = str(uuid.uuid4()) + 
ts.transfer = transfer + ts.schedule = {} + ts.expiration_date = expiration_date + ts.enabled = True + ts.shutdown_instance = False + ts.trust_id = str(uuid.uuid4()) + + return ts + + def test__get_transfer_schedules_filter(self): + result = api._get_transfer_schedules_filter(self.context).all() + self.assertIn(self.valid_transfer_schedule, result) + + def test__get_transfer_schedules_filter_admin(self): + self.context.is_admin = True + result = api._get_transfer_schedules_filter( + self.context, schedule_id=self.outer_scope_transfer_schedule.id + ).first() + self.assertEqual(result, self.outer_scope_transfer_schedule) + + def test__get_transfer_schedules_filter_out_of_user_context(self): + result = api._get_transfer_schedules_filter( + self.context, schedule_id=self.outer_scope_transfer_schedule.id + ).first() + self.assertIsNone(result) + + def test__get_transfer_schedules_filter_by_transfer(self): + result = api._get_transfer_schedules_filter( + self.context, transfer_id=self.valid_transfer_schedule.transfer_id) + self.assertEqual(result.first(), self.valid_transfer_schedule) + + def test__get_transfer_schedules_filter_by_schedule_id(self): + result = api._get_transfer_schedules_filter( + self.context, schedule_id=self.valid_transfer_schedule.id).first() + self.assertEqual(result, self.valid_transfer_schedule) + + def test__get_transfer_schedules_filter_by_not_expired(self): + expiration_date = timeutils.utcnow() + datetime.timedelta(days=1) + unexpired_transfer_schedule = self._create_dummy_transfer_schedule( + self.valid_transfer, expiration_date=expiration_date) + self.session.add(unexpired_transfer_schedule) + expiration_null_transfer_schedule = ( + self._create_dummy_transfer_schedule( + self.valid_transfer, expiration_date=None)) + self.session.add(expiration_null_transfer_schedule) + result = api._get_transfer_schedules_filter( + self.context, expired=False).all() + self.assertIn(unexpired_transfer_schedule, result) + 
self.assertIn(expiration_null_transfer_schedule, result) + + def test_get_transfer_schedules(self): + result = api.get_transfer_schedules(self.context) + self.assertIn(self.valid_transfer_schedule, result) + + def test_get_transfer_schedule(self): + result = api.get_transfer_schedule( + self.context, self.valid_transfer.id, + self.valid_transfer_schedule.id) + self.assertEqual(result, self.valid_transfer_schedule) + + def test_update_transfer_schedule(self): + pre_update_mock = mock.Mock() + post_update_mock = mock.Mock() + api.update_transfer_schedule( + self.context, self.valid_transfer.id, + self.valid_transfer_schedule.id, {"shutdown_instance": True}, + pre_update_callable=pre_update_mock, + post_update_callable=post_update_mock) + result = api.get_transfer_schedule( + self.context, self.valid_transfer.id, + self.valid_transfer_schedule.id) + self.assertEqual(result.shutdown_instance, True) + pre_update_mock.assert_called_once_with( + schedule=self.valid_transfer_schedule) + post_update_mock.assert_called_once_with( + self.context, self.valid_transfer_schedule) + + def test_delete_transfer_schedule_not_found(self): + self.assertRaises(exception.NotFound, api.delete_transfer_schedule, + self.context, self.valid_transfer.id, "invalid") + + def test_delete_transfer_schedule_admin(self): + self.context.is_admin = True + outer_scope_schedule = self._create_dummy_transfer_schedule( + self.outer_scope_transfer, None) + self.session.add(outer_scope_schedule) + api.delete_transfer_schedule( + self.context, self.outer_scope_transfer.id, + outer_scope_schedule.id) + result = api.get_transfer_schedule( + self.context, self.outer_scope_transfer.id, + outer_scope_schedule.id) + self.assertIsNone(result) + + def test_delete_transfer_schedule_out_of_user_context(self): + outer_scope_schedule = self._create_dummy_transfer_schedule( + self.outer_scope_transfer, None) + self.session.add(outer_scope_schedule) + self.assertRaises( + exception.NotAuthorized, 
api.delete_transfer_schedule, + self.context, self.outer_scope_transfer.id, + outer_scope_schedule.id) + + def test_delete_transfer_schedule(self): + dummy_transfer_schedule = self._create_dummy_transfer_schedule( + self.valid_transfer, None) + self.session.add(dummy_transfer_schedule) + pre_delete_mock = mock.Mock() + post_delete_mock = mock.Mock() + api.delete_transfer_schedule( + self.context, self.valid_transfer.id, dummy_transfer_schedule.id, + pre_delete_callable=pre_delete_mock, + post_delete_callable=post_delete_mock) + result = api.get_transfer_schedule( + self.context, self.valid_transfer.id, dummy_transfer_schedule.id) + self.assertIsNone(result) + pre_delete_mock.assert_called_once_with( + self.context, dummy_transfer_schedule) + post_delete_mock.assert_called_once_with( + self.context, dummy_transfer_schedule) + + def test_delete_transfer_schedule_already_deleted(self): + dummy_transfer_schedule = self._create_dummy_transfer_schedule( + self.valid_transfer, None) + self.session.add(dummy_transfer_schedule) + + def pre_delete(context, schedule): + schedule.deleted = True + schedule.deleted_at = timeutils.utcnow() + context.session.commit() + + self.assertRaises( + exception.NotFound, api.delete_transfer_schedule, + self.context, self.valid_transfer.id, dummy_transfer_schedule.id, + pre_delete_callable=pre_delete) + + def test_add_transfer_schedule(self): + new_schedule = self._create_dummy_transfer_schedule( + self.valid_transfer, None) + post_add_mock = mock.Mock() + api.add_transfer_schedule( + self.context, new_schedule, post_create_callable=post_add_mock) + result = api.get_transfer_schedule( + self.context, self.valid_transfer.id, new_schedule.id) + self.assertEqual(result, new_schedule) + post_add_mock.assert_called_once_with(self.context, new_schedule) + + def test_add_transfer_schedule_out_of_user_context(self): + new_schedule = self._create_dummy_transfer_schedule( + self.outer_scope_transfer, None) + self.assertRaises( + 
exception.NotAuthorized, api.add_transfer_schedule, + self.context, new_schedule) + + +class TransfersDBAPITestCase(BaseDBAPITestCase): + + @classmethod + def setUpClass(cls): + super(TransfersDBAPITestCase, cls).setUpClass() + cls.valid_transfer = cls.valid_data['user_scope'].get('transfer') + cls.valid_transfer_execution = cls.valid_data['user_scope'].get( + 'tasks_execution') + cls.outer_scope_transfer = cls.valid_data['outer_scope'].get( + 'transfer') + + @staticmethod + def _create_dummy_transfer(scenario=constants.TRANSFER_SCENARIO_REPLICA, + origin_endpoint_id=str(uuid.uuid4()), + destination_endpoint_id=str(uuid.uuid4()), + project_id=DEFAULT_PROJECT_ID): + transfer = models.Transfer() + transfer.id = str(uuid.uuid4()) + transfer.user_id = project_id + transfer.project_id = project_id + transfer.base_id = transfer.id + transfer.scenario = scenario + transfer.last_execution_status = DEFAULT_EXECUTION_STATUS + transfer.executions = [] + transfer.instances = [DEFAULT_INSTANCE] + transfer.info = DEFAULT_TASK_INFO + transfer.origin_endpoint_id = origin_endpoint_id + transfer.destination_endpoint_id = destination_endpoint_id + + return transfer + + def test_get_transfers_admin(self): + self.context.is_admin = True + result = api.get_transfers(self.context) + self.assertIn(self.outer_scope_transfer, result) + + def test_get_transfers_out_of_user_context(self): + result = api.get_transfers(self.context) + self.assertNotIn(self.outer_scope_transfer, result) + + def test_get_transfers(self): + result = api.get_transfers(self.context) + self.assertIn(self.valid_transfer, result) + + def test_get_transfers_include_tasks_executions(self): + result = api.get_transfers(self.context, include_tasks_executions=True) + executions = [] + for transfer in result: + executions.extend(transfer.executions) + self.assertIn(self.valid_transfer_execution, executions) + + def test_get_transfers_include_task_info(self): + result = api.get_transfers(self.context, include_task_info=True) 
+ self.assertTrue(hasattr(result[0], 'info')) + + def test_get_transfers_transfer_scenario(self): + scenario = constants.TRANSFER_SCENARIO_REPLICA + result = api.get_transfers(self.context, transfer_scenario=scenario) + self.assertTrue(all([res.scenario == scenario for res in result])) + + def test_get_transfers_to_dict(self): + result = api.get_transfers(self.context, to_dict=True) + transfer_ids = [res['id'] for res in result] + self.assertIn(self.valid_transfer.id, transfer_ids) + + def test_get_transfer_admin(self): + self.context.is_admin = True + result = api.get_transfer(self.context, self.outer_scope_transfer.id) + self.assertEqual(result, self.outer_scope_transfer) + + def test_get_transfer_include_task_info(self): + result = api.get_transfer( + self.context, self.valid_transfer.id, include_task_info=True) + self.assertEqual(result.info, DEFAULT_TASK_INFO) + + def test_get_transfer_by_scenario(self): + result = api.get_transfer( + self.context, self.valid_transfer.id, + transfer_scenario=constants.TRANSFER_SCENARIO_REPLICA) + self.assertEqual(result, self.valid_transfer) + + def test_get_transfer_out_of_user_scope(self): + result = api.get_transfer(self.context, self.outer_scope_transfer.id) + self.assertIsNone(result) + + def test_get_transfer_to_dict(self): + result = api.get_transfer( + self.context, self.valid_transfer.id, to_dict=True) + self.assertEqual(result['id'], self.valid_transfer.id) + + result = api.get_transfer(self.context, "invalid", to_dict=True) + self.assertIsNone(result) + + def test_get_endpoint_transfers_count(self): + origin_endpoint_id = str(uuid.uuid4()) + dest_endpoint_id = str(uuid.uuid4()) + dummy_transfer_replica = self._create_dummy_transfer( + origin_endpoint_id=origin_endpoint_id, + destination_endpoint_id=dest_endpoint_id) + dummy_transfer_migration = self._create_dummy_transfer( + scenario=constants.TRANSFER_SCENARIO_LIVE_MIGRATION, + origin_endpoint_id=origin_endpoint_id, + destination_endpoint_id=dest_endpoint_id) + 
self.session.add(dummy_transfer_replica) + self.session.add(dummy_transfer_migration) + + result = api.get_endpoint_transfers_count( + self.context, origin_endpoint_id) + self.assertEqual(result, 2) + + result = api.get_endpoint_transfers_count( + self.context, origin_endpoint_id, + transfer_scenario=constants.TRANSFER_SCENARIO_REPLICA) + self.assertEqual(result, 1) + + def test_add_transfer(self): + dummy_transfer = self._create_dummy_transfer() + api.add_transfer(self.context, dummy_transfer) + result = api.get_transfer(self.context, dummy_transfer.id) + self.assertEqual(result, dummy_transfer) + + +class DeploymentsDBAPITestCase(BaseDBAPITestCase): + + @classmethod + def setUpClass(cls): + super(DeploymentsDBAPITestCase, cls).setUpClass() + cls.user_deployment = cls.valid_data['user_scope'].get('deployment') + cls.outer_scope_deployment = cls.valid_data['outer_scope'].get( + 'deployment') + cls.user_deployment_execution = cls.valid_data['user_scope'].get( + 'deployment_execution') + cls.outer_scope_deployment_execution = cls.valid_data[ + 'outer_scope'].get('deployment_execution') + cls.user_deployment_task = cls.user_deployment_execution.tasks[0] + cls.user_transfer = cls.valid_data['user_scope'].get('transfer') + + @staticmethod + def _create_dummy_deployment(transfer_id, + origin_endpoint_id=str(uuid.uuid4()), + destination_endpoint_id=str(uuid.uuid4()), + project_id=DEFAULT_PROJECT_ID): + deployment = models.Deployment() + deployment.id = str(uuid.uuid4()) + deployment.user_id = project_id + deployment.project_id = project_id + deployment.base_id = deployment.id + deployment.transfer_id = transfer_id + deployment.last_execution_status = DEFAULT_EXECUTION_STATUS + deployment.executions = [] + deployment.instances = [DEFAULT_INSTANCE] + deployment.info = DEFAULT_TASK_INFO + deployment.origin_endpoint_id = origin_endpoint_id + deployment.destination_endpoint_id = destination_endpoint_id + + return deployment + + def test_get_transfers_deployments_admin(self): + 
self.context.is_admin = True + result = api.get_transfer_deployments( + self.context, self.user_deployment.transfer_id) + self.assertIn(self.user_deployment, result) + + def test_get_transfer_deployments_out_of_user_context(self): + result = api.get_transfer_deployments( + self.context, self.outer_scope_deployment.transfer_id) + self.assertNotIn(self.outer_scope_deployment, result) + + def test_get_transfer_deployments(self): + result = api.get_transfer_deployments( + self.context, self.user_deployment.transfer_id) + self.assertIn(self.user_deployment, result) + + def test_get_deployments_admin(self): + self.context.is_admin = True + result = api.get_deployments(self.context) + self.assertIn(self.outer_scope_deployment, result) + self.assertIn(self.outer_scope_deployment_execution, + self.outer_scope_deployment.executions) + + def test_get_deployments_include_tasks(self): + result = api.get_deployments(self.context, include_tasks=True) + self.assertIn(self.user_deployment, result) + tasks = [] + for dep in result: + for execution in dep.executions: + tasks.extend(execution.tasks) + self.assertIn(self.user_deployment_task, tasks) + + def test_get_deployments_include_task_info(self): + result = api.get_deployments(self.context, include_task_info=True) + for dep in result: + if dep.id == self.user_deployment.id: + self.assertEqual(dep.info, DEFAULT_TASK_INFO) + + def test_get_deployments_out_of_user_context(self): + result = api.get_deployments(self.context) + self.assertNotIn(self.outer_scope_deployment, result) + + def test_get_deployments(self): + result = api.get_deployments(self.context) + self.assertIn(self.user_deployment, result) + + def test_get_deployments_to_dict(self): + result = api.get_deployments(self.context, to_dict=True) + self.assertIn(self.user_deployment.id, [d['id'] for d in result]) + + def test_get_deployment_admin(self): + self.context.is_admin = True + result = api.get_deployment( + self.context, self.outer_scope_deployment.id) + 
self.assertEqual(self.outer_scope_deployment, result) + + def test_get_deployment_include_task_info(self): + result = api.get_deployment(self.context, self.user_deployment.id, + include_task_info=True) + self.assertEqual(result.info, self.user_deployment.info) + + def test_get_deployment_out_of_user_context(self): + result = api.get_deployment( + self.context, self.outer_scope_deployment.id) + self.assertIsNone(result) + + def test_get_deployment(self): + result = api.get_deployment(self.context, self.user_deployment.id) + self.assertEqual(result, self.user_deployment) + + def test_get_deployment_to_dict(self): + result = api.get_deployment(self.context, self.user_deployment.id, + to_dict=True) + self.assertEqual(result['id'], self.user_deployment.id) + + def test_add_deployment(self): + dummy_deployment = self._create_dummy_deployment(self.user_transfer.id) + api.add_deployment(self.context, dummy_deployment) + result = api.get_deployment(self.context, dummy_deployment.id) + self.assertEqual(result, dummy_deployment) + -class DBAPITestCase(test_base.CoriolisBaseTestCase): - """Test suite for the Coriolis DB API.""" +class BaseTransferActionDBAPITestCase(BaseDBAPITestCase): - @mock.patch.object(api, 'get_endpoint') - def test_update_endpoint_not_found(self, mock_get_endpoint): - mock_get_endpoint.return_value = None + @classmethod + def setUpClass(cls): + super(BaseTransferActionDBAPITestCase, cls).setUpClass() + cls.user_transfer = cls.valid_data['user_scope'].get('transfer') + cls.outer_scope_transfer = cls.valid_data['outer_scope'].get( + 'transfer') - # We only need to test the unwrapped functions. Without this, - # when calling a coriolis.db.api function, it will try to - # establish an SQL connection. 
- update_endpoint = testutils.get_wrapped_function(api.update_endpoint) + def test_get_action_admin(self): + self.context.is_admin = True + result = api.get_action(self.context, self.outer_scope_transfer.id) + self.assertEqual(result, self.outer_scope_transfer) - self.assertRaises(exception.NotFound, update_endpoint, - mock.sentinel.context, mock.sentinel.endpoint_id, - mock.sentinel.updated_values) + def test_get_action_not_found(self): + self.assertRaises( + exception.NotFound, api.get_action, self.context, + self.outer_scope_transfer.id) - mock_get_endpoint.assert_called_once_with(mock.sentinel.context, - mock.sentinel.endpoint_id) + def test_get_action_include_task_info(self): + result = api.get_action( + self.context, self.user_transfer.id, include_task_info=True) + self.assertEqual(result.info, self.user_transfer.info) diff --git a/coriolis/tests/migrations/test_api.py b/coriolis/tests/migrations/test_api.py deleted file mode 100644 index 4fa4fa4a1..000000000 --- a/coriolis/tests/migrations/test_api.py +++ /dev/null @@ -1,93 +0,0 @@ -# Copyright 2024 Cloudbase Solutions Srl -# All Rights Reserved. 
- -from unittest import mock - -from coriolis.migrations import api as migrations_module -from coriolis.tests import test_base - - -class APITestCase(test_base.CoriolisBaseTestCase): - """Test suite for the Coriolis Migrations API.""" - - def setUp(self): - super(APITestCase, self).setUp() - self.api = migrations_module.API() - self.rpc_client = mock.MagicMock() - self.api._rpc_client = self.rpc_client - self.ctxt = mock.sentinel.ctxt - self.migration_id = mock.sentinel.migration_id - - def test_migrate_instances(self): - origin_endpoint_id = mock.sentinel.origin_endpoint_id - destination_endpoint_id = mock.sentinel.destination_endpoint_id - origin_minion_pool_id = mock.sentinel.origin_minion_pool_id - destination_minion_pool_id = mock.sentinel.destination_minion_pool_id - instance_osmorphing_minion_pool_mappings = ( - mock.sentinel.instance_osmorphing_minion_pool_mappings) - source_environment = mock.sentinel.source_environment - destination_environment = mock.sentinel.destination_environment - instances = mock.sentinel.instances - network_map = mock.sentinel.network_map - storage_mappings = mock.sentinel.storage_mappings - replication_count = mock.sentinel.replication_count - shutdown_instances = mock.sentinel.shutdown_instances - - result = self.api.migrate_instances( - self.ctxt, origin_endpoint_id, destination_endpoint_id, - origin_minion_pool_id, destination_minion_pool_id, - instance_osmorphing_minion_pool_mappings, source_environment, - destination_environment, instances, network_map, storage_mappings, - replication_count, shutdown_instances) - self.rpc_client.migrate_instances.assert_called_once_with( - self.ctxt, origin_endpoint_id, destination_endpoint_id, - origin_minion_pool_id, destination_minion_pool_id, - instance_osmorphing_minion_pool_mappings, source_environment, - destination_environment, instances, network_map, storage_mappings, - replication_count, shutdown_instances=shutdown_instances, - notes=None, skip_os_morphing=False, user_scripts=None) 
- self.assertEqual(result, - self.rpc_client.migrate_instances.return_value) - - def test_deploy_replica_instances(self): - replica_id = mock.sentinel.replica_id - instance_osmorphing_minion_pool_mappings = ( - mock.sentinel.instance_osmorphing_minion_pool_mappings) - - result = self.api.deploy_replica_instances( - self.ctxt, replica_id, instance_osmorphing_minion_pool_mappings) - - self.rpc_client.deploy_replica_instances.assert_called_once_with( - self.ctxt, replica_id, - instance_osmorphing_minion_pool_mappings=( - instance_osmorphing_minion_pool_mappings), - clone_disks=False, force=False, skip_os_morphing=False, - user_scripts=None) - self.assertEqual(result, - self.rpc_client.deploy_replica_instances.return_value) - - def test_delete(self): - self.api.delete(self.ctxt, self.migration_id) - self.rpc_client.delete_migration.assert_called_once_with( - self.ctxt, self.migration_id) - - def test_cancel(self): - self.api.cancel(self.ctxt, self.migration_id, True) - self.rpc_client.cancel_migration.assert_called_once_with( - self.ctxt, self.migration_id, True) - - def test_get_migrations(self): - result = self.api.get_migrations(self.ctxt, include_tasks=False, - include_task_info=False) - - self.rpc_client.get_migrations.assert_called_once_with( - self.ctxt, False, include_task_info=False) - self.assertEqual(result, self.rpc_client.get_migrations.return_value) - - def test_get_migration(self): - result = self.api.get_migration(self.ctxt, self.migration_id, - include_task_info=False) - - self.rpc_client.get_migration.assert_called_once_with( - self.ctxt, self.migration_id, include_task_info=False) - self.assertEqual(result, self.rpc_client.get_migration.return_value) diff --git a/coriolis/tests/minion_manager/rpc/test_client.py b/coriolis/tests/minion_manager/rpc/test_client.py index c2e56936a..ac7c91591 100644 --- a/coriolis/tests/minion_manager/rpc/test_client.py +++ b/coriolis/tests/minion_manager/rpc/test_client.py @@ -122,22 +122,24 @@ def 
test_validate_minion_pool_selections_for_action(self): self.client.validate_minion_pool_selections_for_action, args ) - def test_allocate_minion_machines_for_replica(self): - args = {"replica": "test_replica"} + def test_allocate_minion_machines_for_transfer(self): + args = {"transfer": "test_transfer"} self._test( - self.client.allocate_minion_machines_for_replica, args, + self.client.allocate_minion_machines_for_transfer, args, rpc_op='_cast', + server_fun_name='allocate_minion_machines_for_transfer' ) - def test_allocate_minion_machines_for_migration(self): + def test_allocate_minion_machines_for_deployment(self): args = { - "migration": "test_migration", + "deployment": "test_deployment", "include_transfer_minions": True, "include_osmorphing_minions": True } self._test( - self.client.allocate_minion_machines_for_migration, args, + self.client.allocate_minion_machines_for_deployment, args, rpc_op='_cast', + server_fun_name='allocate_minion_machines_for_deployment' ) def test_deallocate_minion_machine(self): diff --git a/coriolis/tests/minion_manager/rpc/test_tasks.py b/coriolis/tests/minion_manager/rpc/test_tasks.py index e32e56757..42990f8c9 100644 --- a/coriolis/tests/minion_manager/rpc/test_tasks.py +++ b/coriolis/tests/minion_manager/rpc/test_tasks.py @@ -233,15 +233,15 @@ def test_get_task_name(self): ) @mock.patch.object( - ConductorClient, 'report_migration_minions_allocation_error' + ConductorClient, 'report_deployment_minions_allocation_error' ) def test__report_machine_allocation_failure( - self, mock_report_migration_minions_allocation_error): + self, mock_report_depl_minions_allocation_error): result = self.task._report_machine_allocation_failure( mock.sentinel.context, self.action_id, mock.sentinel.failure_str) self.assertIsNone(result) - mock_report_migration_minions_allocation_error.assert_called_once_with( + mock_report_depl_minions_allocation_error.assert_called_once_with( mock.sentinel.context, self.action_id, mock.sentinel.failure_str ) @@
-263,15 +263,15 @@ def test_get_task_name(self): ) @mock.patch.object( - ConductorClient, 'report_replica_minions_allocation_error' + ConductorClient, 'report_transfer_minions_allocation_error' ) def test__report_machine_allocation_failure( - self, mock_report_replica_minions_allocation_error): + self, mock_report_transfer_minions_allocation_error): result = self.task._report_machine_allocation_failure( mock.sentinel.context, self.action_id, mock.sentinel.failure_str) self.assertIsNone(result) - mock_report_replica_minions_allocation_error.assert_called_once_with( + mock_report_transfer_minions_allocation_error.assert_called_once_with( mock.sentinel.context, self.action_id, mock.sentinel.failure_str ) @@ -465,7 +465,7 @@ def test_execute_raises_exception_when_invalid_migration_state( mock_get_action_label): mock_get_minion_machine.return_value = self.minion_machine mock_confirm_allocation.side_effect = [ - exception.InvalidReplicaState(reason='Invalid state')] + exception.InvalidTransferState(reason='Invalid state')] self.assertRaises( exception.MinionMachineAllocationFailure, @@ -500,16 +500,16 @@ def test_get_task_name(self): ) @mock.patch.object( - ConductorClient, 'confirm_migration_minions_allocation' + ConductorClient, 'confirm_deployment_minions_allocation' ) def test__confirm_machine_allocation_for_action( - self, mock_confirm_migration_minions_allocation): + self, mock_confirm_deployment_minions_allocation): result = self.task._confirm_machine_allocation_for_action( mock.sentinel.context, self.action_id, mock.sentinel.machine_allocations) self.assertIsNone(result) - mock_confirm_migration_minions_allocation.assert_called_once_with( + mock_confirm_deployment_minions_allocation.assert_called_once_with( mock.sentinel.context, self.action_id, mock.sentinel.machine_allocations) @@ -538,16 +538,16 @@ def test_get_task_name(self): ) @mock.patch.object( - ConductorClient, 'confirm_replica_minions_allocation' + ConductorClient, 'confirm_transfer_minions_allocation' 
) def test__confirm_machine_allocation_for_action( - self, mock_confirm_replica_minions_allocation): + self, mock_confirm_transfer_minions_allocation): result = self.task._confirm_machine_allocation_for_action( mock.sentinel.context, mock.sentinel.action_id, mock.sentinel.machine_allocations) self.assertIsNone(result) - mock_confirm_replica_minions_allocation.assert_called_once_with( + mock_confirm_transfer_minions_allocation.assert_called_once_with( mock.sentinel.context, mock.sentinel.action_id, mock.sentinel.machine_allocations) diff --git a/coriolis/tests/replica_cron/test_api.py b/coriolis/tests/replica_cron/test_api.py deleted file mode 100644 index a78434e46..000000000 --- a/coriolis/tests/replica_cron/test_api.py +++ /dev/null @@ -1,69 +0,0 @@ -# Copyright 2024 Cloudbase Solutions Srl -# All Rights Reserved. - -from unittest import mock - -from coriolis.replica_cron import api as replicas_cron_module -from coriolis.tests import test_base - - -class APITestCase(test_base.CoriolisBaseTestCase): - """Test suite for the Coriolis API class.""" - - def setUp(self): - super(APITestCase, self).setUp() - self.api = replicas_cron_module.API() - self.rpc_client = mock.MagicMock() - self.api._rpc_client = self.rpc_client - self.ctxt = mock.sentinel.ctxt - self.replica_id = mock.sentinel.replica_id - self.schedule_id = mock.sentinel.schedule_id - - def test_create(self): - schedule = mock.sentinel.schedule - enabled = mock.sentinel.enabled - exp_date = mock.sentinel.exp_date - shutdown_instance = mock.sentinel.shutdown_instance - - result = self.api.create( - self.ctxt, self.replica_id, schedule, enabled, exp_date, - shutdown_instance) - - self.rpc_client.create_replica_schedule.assert_called_once_with( - self.ctxt, self.replica_id, schedule, enabled, exp_date, - shutdown_instance) - self.assertEqual(result, - self.rpc_client.create_replica_schedule.return_value) - - def test_get_schedules(self): - result = self.api.get_schedules(self.ctxt, self.replica_id) - - 
self.rpc_client.get_replica_schedules.assert_called_once_with( - self.ctxt, self.replica_id, expired=True) - self.assertEqual(result, - self.rpc_client.get_replica_schedules.return_value) - - def test_get_schedule(self): - result = self.api.get_schedule(self.ctxt, self.replica_id, - self.schedule_id) - - self.rpc_client.get_replica_schedule.assert_called_once_with( - self.ctxt, self.replica_id, self.schedule_id, expired=True) - self.assertEqual(result, - self.rpc_client.get_replica_schedule.return_value) - - def test_update(self): - update_values = mock.sentinel.update_values - - result = self.api.update(self.ctxt, self.replica_id, self.schedule_id, - update_values) - - self.rpc_client.update_replica_schedule.assert_called_once_with( - self.ctxt, self.replica_id, self.schedule_id, update_values) - self.assertEqual(result, - self.rpc_client.update_replica_schedule.return_value) - - def test_delete(self): - self.api.delete(self.ctxt, self.replica_id, self.schedule_id) - self.rpc_client.delete_replica_schedule.assert_called_once_with( - self.ctxt, self.replica_id, self.schedule_id) diff --git a/coriolis/tests/replica_tasks_executions/test_api.py b/coriolis/tests/replica_tasks_executions/test_api.py deleted file mode 100644 index 3ae28f414..000000000 --- a/coriolis/tests/replica_tasks_executions/test_api.py +++ /dev/null @@ -1,65 +0,0 @@ -# Copyright 2024 Cloudbase Solutions Srl -# All Rights Reserved. 
- -from unittest import mock - -from coriolis.replica_tasks_executions import api as replicas_module -from coriolis.tests import test_base - - -class APITestCase(test_base.CoriolisBaseTestCase): - """Test suite for the Coriolis API class.""" - - def setUp(self): - super(APITestCase, self).setUp() - self.api = replicas_module.API() - self.rpc_client = mock.MagicMock() - self.api._rpc_client = self.rpc_client - self.ctxt = mock.sentinel.ctxt - self.replica_id = mock.sentinel.replica_id - self.execution_id = mock.sentinel.execution_id - - def test_create(self): - shutdown_instances = mock.sentinel.shutdown_instances - - result = self.api.create(self.ctxt, self.replica_id, - shutdown_instances) - - self.rpc_client.execute_replica_tasks.assert_called_once_with( - self.ctxt, self.replica_id, shutdown_instances) - self.assertEqual(result, - self.rpc_client.execute_replica_tasks.return_value) - - def test_delete(self): - self.api.delete(self.ctxt, self.replica_id, self.execution_id) - - self.rpc_client.delete_replica_tasks_execution.assert_called_once_with( - self.ctxt, self.replica_id, self.execution_id) - - def test_cancel(self): - force = mock.sentinel.force - - self.api.cancel(self.ctxt, self.replica_id, self.execution_id, force) - - self.rpc_client.cancel_replica_tasks_execution.assert_called_once_with( - self.ctxt, self.replica_id, self.execution_id, force) - - def test_get_executions(self): - include_tasks = mock.sentinel.include_tasks - - result = self.api.get_executions(self.ctxt, self.replica_id, - include_tasks) - - self.rpc_client.get_replica_tasks_executions.assert_called_once_with( - self.ctxt, self.replica_id, include_tasks) - self.assertEqual( - result, self.rpc_client.get_replica_tasks_executions.return_value) - - def test_get_execution(self): - result = self.api.get_execution(self.ctxt, self.replica_id, - self.execution_id) - - self.rpc_client.get_replica_tasks_execution.assert_called_once_with( - self.ctxt, self.replica_id, self.execution_id) - 
self.assertEqual( - result, self.rpc_client.get_replica_tasks_execution.return_value) diff --git a/coriolis/tests/replicas/__init__.py b/coriolis/tests/replicas/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/coriolis/tests/tasks/test_osmorphing_tasks.py b/coriolis/tests/tasks/test_osmorphing_tasks.py index 4485fc0a2..13402f454 100644 --- a/coriolis/tests/tasks/test_osmorphing_tasks.py +++ b/coriolis/tests/tasks/test_osmorphing_tasks.py @@ -35,10 +35,10 @@ def test__run(self, mock_morph_image, mock_unmarshal, mock_get_provider, destination = mock.MagicMock() expected_calls = [ mock.call.mock_get_provider( - origin['type'], constants.PROVIDER_TYPE_REPLICA_EXPORT, + origin['type'], constants.PROVIDER_TYPE_TRANSFER_EXPORT, mock.sentinel.event_handler), mock.call.mock_get_provider( - destination['type'], constants.PROVIDER_TYPE_REPLICA_IMPORT, + destination['type'], constants.PROVIDER_TYPE_TRANSFER_IMPORT, mock.sentinel.event_handler), ] diff --git a/coriolis/tests/tasks/test_replica_tasks.py b/coriolis/tests/tasks/test_replica_tasks.py index a5d9679b6..b5f75a20f 100644 --- a/coriolis/tests/tasks/test_replica_tasks.py +++ b/coriolis/tests/tasks/test_replica_tasks.py @@ -75,7 +75,7 @@ def test__run(self, mock_validate_value, mock_get_conn_info, mock.sentinel.destiantion, task_info, mock.sentinel.event_handler) self.assertEqual(result, expected_result) mock_get_provider.assert_called_once_with( - origin['type'], constants.PROVIDER_TYPE_REPLICA_EXPORT, + origin['type'], constants.PROVIDER_TYPE_TRANSFER_EXPORT, mock.sentinel.event_handler) mock_get_conn_info.assert_called_once_with(mock.sentinel.ctxt, origin) prov_fun.assert_called_once_with( @@ -103,7 +103,7 @@ def test__run(self, mock_get_conn_info, mock_get_provider): mock.sentinel.destiantion, task_info, mock.sentinel.event_handler) self.assertEqual(result, {}) mock_get_provider.assert_called_once_with( - origin['type'], constants.PROVIDER_TYPE_REPLICA_EXPORT, + origin['type'], 
constants.PROVIDER_TYPE_TRANSFER_EXPORT, mock.sentinel.event_handler) mock_get_conn_info.assert_called_once_with(mock.sentinel.ctxt, origin) prov_fun.assert_called_once_with( @@ -157,7 +157,7 @@ def test__run(self, mock_unmarshal, mock_check_vol_info, mock_get_vol_info, mock.sentinel.destiantion, task_info, mock.sentinel.event_handler) self.assertEqual(result, expected_result) mock_get_provider.assert_called_once_with( - origin['type'], constants.PROVIDER_TYPE_REPLICA_EXPORT, + origin['type'], constants.PROVIDER_TYPE_TRANSFER_EXPORT, mock.sentinel.event_handler) mock_get_conn_info.assert_called_once_with(mock.sentinel.ctxt, origin) mock_get_vol_info.assert_called_once_with(task_info) @@ -196,7 +196,7 @@ def test__run(self, mock_check_vol_info, mock_validate_value, destination, task_info, mock.sentinel.event_handler) self.assertEqual(result, expected_result) mock_get_provider.assert_called_once_with( - destination['type'], constants.PROVIDER_TYPE_REPLICA_IMPORT, + destination['type'], constants.PROVIDER_TYPE_TRANSFER_IMPORT, mock.sentinel.event_handler) mock_get_conn_info.assert_called_once_with( mock.sentinel.ctxt, destination) @@ -235,7 +235,7 @@ def test__run(self, mock_get_vol_info, mock_get_conn_info, mock.sentinel.destination, task_info, mock.sentinel.event_handler) self.assertEqual(result, expected_result) mock_get_provider.assert_called_once_with( - origin['type'], constants.PROVIDER_TYPE_REPLICA_EXPORT, + origin['type'], constants.PROVIDER_TYPE_TRANSFER_EXPORT, mock.sentinel.event_handler) mock_get_conn_info.assert_called_once_with( mock.sentinel.ctxt, origin) @@ -278,7 +278,7 @@ def test__run(self, mock_get_vol_info, mock_get_conn_info, destination, task_info, mock.sentinel.event_handler) self.assertEqual(result, expected_result) mock_get_provider.assert_called_once_with( - destination['type'], constants.PROVIDER_TYPE_REPLICA_IMPORT, + destination['type'], constants.PROVIDER_TYPE_TRANSFER_IMPORT, mock.sentinel.event_handler) 
mock_get_conn_info.assert_called_once_with( mock.sentinel.ctxt, destination) @@ -346,7 +346,7 @@ def _get_result(): self.assertEqual(_get_result(), expected_result) mock_get_provider.assert_called_once_with( - origin['type'], constants.PROVIDER_TYPE_REPLICA_EXPORT, + origin['type'], constants.PROVIDER_TYPE_TRANSFER_EXPORT, mock.sentinel.event_handler) mock_get_conn_info.assert_called_once_with( mock.sentinel.ctxt, origin) @@ -377,7 +377,7 @@ def test__run(self, mock_get_conn_info, mock_get_provider): mock.sentinel.destination, task_info, mock.sentinel.event_handler) self.assertEqual(result, expected_result) mock_get_provider.assert_called_once_with( - origin['type'], constants.PROVIDER_TYPE_REPLICA_EXPORT, + origin['type'], constants.PROVIDER_TYPE_TRANSFER_EXPORT, mock.sentinel.event_handler) mock_get_conn_info.assert_called_once_with( mock.sentinel.ctxt, origin) @@ -400,7 +400,7 @@ def test__run_no_resources(self, mock_get_conn_info, mock_get_provider): mock.sentinel.destination, task_info, mock.sentinel.event_handler) self.assertEqual(result, expected_result) mock_get_provider.assert_called_once_with( - origin['type'], constants.PROVIDER_TYPE_REPLICA_EXPORT, + origin['type'], constants.PROVIDER_TYPE_TRANSFER_EXPORT, mock.sentinel.event_handler) mock_get_conn_info.assert_called_once_with( mock.sentinel.ctxt, origin) @@ -469,7 +469,7 @@ def test__run(self, mock_validate_value, mock_check_vol_info, destination, task_info, mock.sentinel.event_handler) self.assertEqual(result, expected_result) mock_get_provider.assert_called_once_with( - destination['type'], constants.PROVIDER_TYPE_REPLICA_IMPORT, + destination['type'], constants.PROVIDER_TYPE_TRANSFER_IMPORT, mock.sentinel.event_handler) mock_get_conn_info.assert_called_once_with( mock.sentinel.ctxt, destination) @@ -511,7 +511,7 @@ def test__run(self, data, mock_get_conn_info, mock_get_provider): destination, task_info, mock.sentinel.event_handler) self.assertEqual(result, expected_result) 
mock_get_provider.assert_called_once_with( - destination['type'], constants.PROVIDER_TYPE_REPLICA_IMPORT, + destination['type'], constants.PROVIDER_TYPE_TRANSFER_IMPORT, mock.sentinel.event_handler) mock_get_conn_info.assert_called_once_with( mock.sentinel.ctxt, destination) @@ -548,7 +548,7 @@ def test__run(self, mock_get_vol_info, mock_get_conn_info, destination, task_info, mock.sentinel.event_handler) self.assertEqual(result, expected_result) mock_get_provider.assert_called_once_with( - destination['type'], constants.PROVIDER_TYPE_REPLICA_IMPORT, + destination['type'], constants.PROVIDER_TYPE_TRANSFER_IMPORT, mock.sentinel.event_handler) mock_get_conn_info.assert_called_once_with( mock.sentinel.ctxt, destination) @@ -581,7 +581,7 @@ def test__run(self, mock_get_conn_info, mock_get_provider): destination, task_info, mock.sentinel.event_handler) self.assertEqual(result, expected_result) mock_get_provider.assert_called_once_with( - destination['type'], constants.PROVIDER_TYPE_REPLICA_IMPORT, + destination['type'], constants.PROVIDER_TYPE_TRANSFER_IMPORT, mock.sentinel.event_handler) mock_get_conn_info.assert_called_once_with( mock.sentinel.ctxt, destination) @@ -608,7 +608,7 @@ def test__run_no_result(self, mock_get_conn_info, mock_get_provider): mock.sentinel.event_handler) self.assertEqual(result, expected_result) mock_get_provider.assert_called_once_with( - destination['type'], constants.PROVIDER_TYPE_REPLICA_IMPORT, + destination['type'], constants.PROVIDER_TYPE_TRANSFER_IMPORT, mock.sentinel.event_handler) mock_get_conn_info.assert_called_once_with( mock.sentinel.ctxt, destination) @@ -640,7 +640,7 @@ def test__run(self, mock_get_conn_info, mock_get_provider): destination, task_info, mock.sentinel.event_handler) self.assertEqual(result, expected_result) mock_get_provider.assert_called_once_with( - destination['type'], constants.PROVIDER_TYPE_REPLICA_IMPORT, + destination['type'], constants.PROVIDER_TYPE_TRANSFER_IMPORT, mock.sentinel.event_handler) 
mock_get_conn_info.assert_called_once_with( mock.sentinel.ctxt, destination) @@ -676,7 +676,7 @@ def test__run(self, mock_check_ensure_volumes_ordering, destination, task_info, mock.sentinel.event_handler) self.assertEqual(result, expected_result) mock_get_provider.assert_called_once_with( - destination['type'], constants.PROVIDER_TYPE_REPLICA_IMPORT, + destination['type'], constants.PROVIDER_TYPE_TRANSFER_IMPORT, mock.sentinel.event_handler) mock_get_conn_info.assert_called_once_with( mock.sentinel.ctxt, destination) @@ -719,7 +719,7 @@ def test__run(self, mock_check_ensure_volumes_ordering, destination, task_info, mock.sentinel.event_handler) self.assertEqual(result, expected_result) mock_get_provider.assert_called_once_with( - destination['type'], constants.PROVIDER_TYPE_REPLICA_IMPORT, + destination['type'], constants.PROVIDER_TYPE_TRANSFER_IMPORT, mock.sentinel.event_handler) mock_get_conn_info.assert_called_once_with( mock.sentinel.ctxt, destination) @@ -761,7 +761,7 @@ def test__run(self, mock_check_ensure_volumes_ordering, destination, task_info, mock.sentinel.event_handler) self.assertEqual(result, expected_result) mock_get_provider.assert_called_once_with( - destination['type'], constants.PROVIDER_TYPE_REPLICA_IMPORT, + destination['type'], constants.PROVIDER_TYPE_TRANSFER_IMPORT, mock.sentinel.event_handler) mock_get_conn_info.assert_called_once_with( mock.sentinel.ctxt, destination) @@ -799,7 +799,7 @@ def test__run(self, mock_get_conn_info, mock_get_provider, self.assertEqual(result, {}) mock_event_manager.assert_called_once_with(mock.sentinel.event_handler) mock_get_provider.assert_called_once_with( - origin['type'], constants.PROVIDER_TYPE_VALIDATE_REPLICA_EXPORT, + origin['type'], constants.PROVIDER_TYPE_VALIDATE_TRANSFER_EXPORT, mock.sentinel.event_handler, raise_if_not_found=False) mock_get_conn_info.assert_called_once_with(mock.sentinel.ctxt, origin) prov_fun.assert_called_once_with( @@ -824,7 +824,7 @@ def test__run_no_source_provider(self, 
mock_get_conn_info, self.assertEqual(result, {}) mock_event_manager.assert_called_once_with(mock.sentinel.event_handler) mock_get_provider.assert_called_once_with( - origin['type'], constants.PROVIDER_TYPE_VALIDATE_REPLICA_EXPORT, + origin['type'], constants.PROVIDER_TYPE_VALIDATE_TRANSFER_EXPORT, mock.sentinel.event_handler, raise_if_not_found=False) mock_get_conn_info.assert_called_once_with(mock.sentinel.ctxt, origin) prov_fun.assert_not_called() @@ -869,7 +869,7 @@ def test__run(self, mock_validate_replica_inputs, mock_get_conn_info, self.assertEqual(result, {}) mock_get_provider.assert_called_once_with( destination['type'], - constants.PROVIDER_TYPE_VALIDATE_REPLICA_IMPORT, None, + constants.PROVIDER_TYPE_VALIDATE_TRANSFER_IMPORT, None, raise_if_not_found=False) mock_get_conn_info.assert_called_once_with( mock.sentinel.ctxt, destination) @@ -896,7 +896,7 @@ def test__run_no_destination_provider( self.assertEqual(result, {}) mock_get_provider.assert_called_once_with( destination['type'], - constants.PROVIDER_TYPE_VALIDATE_REPLICA_IMPORT, None, + constants.PROVIDER_TYPE_VALIDATE_TRANSFER_IMPORT, None, raise_if_not_found=False) mock_get_conn_info.assert_called_once_with( mock.sentinel.ctxt, destination) @@ -920,7 +920,7 @@ def test__run_no_export_info( destination, task_info, None) mock_get_provider.assert_called_once_with( destination['type'], - constants.PROVIDER_TYPE_VALIDATE_REPLICA_IMPORT, None, + constants.PROVIDER_TYPE_VALIDATE_TRANSFER_IMPORT, None, raise_if_not_found=False) mock_get_conn_info.assert_called_once_with( mock.sentinel.ctxt, destination) @@ -960,7 +960,7 @@ def test__run(self, mock_validate_value, mock_get_conn_info, task_info['export_info'], schemas.CORIOLIS_VM_EXPORT_INFO_SCHEMA) mock_get_provider.assert_called_once_with( destination['type'], - constants.PROVIDER_TYPE_VALIDATE_REPLICA_IMPORT, + constants.PROVIDER_TYPE_VALIDATE_TRANSFER_IMPORT, mock.sentinel.event_handler, raise_if_not_found=False) prov_fun.assert_called_once_with( 
mock.sentinel.ctxt, mock_get_conn_info.return_value, @@ -991,7 +991,7 @@ def test__run_no_dest_provider( task_info['export_info'], schemas.CORIOLIS_VM_EXPORT_INFO_SCHEMA) mock_get_provider.assert_called_once_with( destination['type'], - constants.PROVIDER_TYPE_VALIDATE_REPLICA_IMPORT, + constants.PROVIDER_TYPE_VALIDATE_TRANSFER_IMPORT, mock.sentinel.event_handler, raise_if_not_found=False) prov_fun.assert_not_called() @@ -1058,7 +1058,7 @@ def test__run_no_source_provider( mock_event_manager.assert_called_once_with(mock.sentinel.event_handler) mock_get_provider.assert_called_once_with( - origin['type'], constants.PROVIDER_TYPE_SOURCE_REPLICA_UPDATE, + origin['type'], constants.PROVIDER_TYPE_SOURCE_TRANSFER_UPDATE, mock.sentinel.event_handler, raise_if_not_found=False) mock_get_conn_info.assert_not_called() mock_validate_value.assert_not_called() @@ -1092,7 +1092,7 @@ def test__run_no_volumes_info( mock_event_manager.assert_called_once_with(mock.sentinel.event_handler) mock_get_provider.assert_called_once_with( - origin['type'], constants.PROVIDER_TYPE_SOURCE_REPLICA_UPDATE, + origin['type'], constants.PROVIDER_TYPE_SOURCE_TRANSFER_UPDATE, mock.sentinel.event_handler, raise_if_not_found=False) mock_get_conn_info.assert_called_once_with(mock.sentinel.ctxt, origin) mock_validate_value.assert_not_called() @@ -1127,7 +1127,7 @@ def test__run( mock_event_manager.assert_called_once_with(mock.sentinel.event_handler) mock_get_provider.assert_called_once_with( - origin['type'], constants.PROVIDER_TYPE_SOURCE_REPLICA_UPDATE, + origin['type'], constants.PROVIDER_TYPE_SOURCE_TRANSFER_UPDATE, mock.sentinel.event_handler, raise_if_not_found=False) mock_get_conn_info.assert_called_once_with(mock.sentinel.ctxt, origin) mock_validate_value.assert_called_once_with( @@ -1201,7 +1201,7 @@ def test__run_no_dest_provider( mock_event_manager.assert_called_once_with(mock.sentinel.event_handler) mock_get_provider.assert_called_once_with( destination['type'], - 
constants.PROVIDER_TYPE_DESTINATION_REPLICA_UPDATE, + constants.PROVIDER_TYPE_DESTINATION_TRANSFER_UPDATE, mock.sentinel.event_handler, raise_if_not_found=False) mock_get_conn_info.assert_not_called() mock_validate_value.assert_not_called() @@ -1237,7 +1237,7 @@ def test__run_no_volumes_info( mock_event_manager.assert_called_once_with(mock.sentinel.event_handler) mock_get_provider.assert_called_once_with( destination['type'], - constants.PROVIDER_TYPE_DESTINATION_REPLICA_UPDATE, + constants.PROVIDER_TYPE_DESTINATION_TRANSFER_UPDATE, mock.sentinel.event_handler, raise_if_not_found=False) mock_get_conn_info.assert_called_once_with( mock.sentinel.ctxt, destination) @@ -1276,7 +1276,7 @@ def test__run( mock_event_manager.assert_called_once_with(mock.sentinel.event_handler) mock_get_provider.assert_called_once_with( destination['type'], - constants.PROVIDER_TYPE_DESTINATION_REPLICA_UPDATE, + constants.PROVIDER_TYPE_DESTINATION_TRANSFER_UPDATE, mock.sentinel.event_handler, raise_if_not_found=False) mock_get_conn_info.assert_called_once_with( mock.sentinel.ctxt, destination) diff --git a/coriolis/replica_cron/__init__.py b/coriolis/tests/transfer_cron/__init__.py similarity index 100% rename from coriolis/replica_cron/__init__.py rename to coriolis/tests/transfer_cron/__init__.py diff --git a/coriolis/replica_cron/rpc/__init__.py b/coriolis/tests/transfer_cron/rpc/__init__.py similarity index 100% rename from coriolis/replica_cron/rpc/__init__.py rename to coriolis/tests/transfer_cron/rpc/__init__.py diff --git a/coriolis/tests/replica_cron/rpc/test_client.py b/coriolis/tests/transfer_cron/rpc/test_client.py similarity index 91% rename from coriolis/tests/replica_cron/rpc/test_client.py rename to coriolis/tests/transfer_cron/rpc/test_client.py index bcb9157c8..a6e9c0842 100644 --- a/coriolis/tests/replica_cron/rpc/test_client.py +++ b/coriolis/tests/transfer_cron/rpc/test_client.py @@ -3,8 +3,8 @@ from unittest import mock -from coriolis.replica_cron.rpc import client as 
rpc_client from coriolis.tests import test_base +from coriolis.transfer_cron.rpc import client as rpc_client class ReplicaCronClientTestCase(test_base.CoriolisBaseTestCase): @@ -12,7 +12,7 @@ class ReplicaCronClientTestCase(test_base.CoriolisBaseTestCase): def setUp(self): super(ReplicaCronClientTestCase, self).setUp() - self.client = rpc_client.ReplicaCronClient() + self.client = rpc_client.TransferCronClient() self.ctxt = mock.MagicMock() def test_register(self): diff --git a/coriolis/tests/replica_cron/rpc/test_server.py b/coriolis/tests/transfer_cron/rpc/test_server.py similarity index 71% rename from coriolis/tests/replica_cron/rpc/test_server.py rename to coriolis/tests/transfer_cron/rpc/test_server.py index ba06523a9..9481f6e5a 100644 --- a/coriolis/tests/replica_cron/rpc/test_server.py +++ b/coriolis/tests/transfer_cron/rpc/test_server.py @@ -9,55 +9,55 @@ from coriolis.conductor.rpc import client as rpc_client from coriolis import exception -from coriolis.replica_cron.rpc import server from coriolis.tests import test_base +from coriolis.transfer_cron.rpc import server -class TriggerReplicaTestCase(test_base.CoriolisBaseTestCase): - """Test suite for the Coriolis _trigger_replica function.""" +class TriggerTransferTestCase(test_base.CoriolisBaseTestCase): + """Test suite for the Coriolis _trigger_transfer function.""" - def test__trigger_replica(self): + def test__trigger_transfer(self): mock_conductor_client = mock.MagicMock() - mock_conductor_client.execute_replica_tasks.return_value = { + mock_conductor_client.execute_transfer_tasks.return_value = { 'id': mock.sentinel.id, 'action_id': mock.sentinel.action_id } - result = server._trigger_replica( + result = server._trigger_transfer( mock.sentinel.ctxt, mock_conductor_client, - mock.sentinel.replica_id, False) + mock.sentinel.transfer_id, False) - mock_conductor_client.execute_replica_tasks.assert_called_once_with( - mock.sentinel.ctxt, mock.sentinel.replica_id, False) + 
mock_conductor_client.execute_transfer_tasks.assert_called_once_with( + mock.sentinel.ctxt, mock.sentinel.transfer_id, False) self.assertEqual( - result, 'Execution %s for Replica %s' % ( + result, 'Execution %s for Transfer %s' % ( mock.sentinel.id, mock.sentinel.action_id)) - def test__trigger_replica_invalid_replica_state(self): + def test__trigger_transfer_invalid_transfer_state(self): mock_conductor_client = mock.MagicMock() - mock_conductor_client.execute_replica_tasks.side_effect = ( - exception.InvalidReplicaState(reason='test_reason')) + mock_conductor_client.execute_transfer_tasks.side_effect = ( + exception.InvalidTransferState(reason='test_reason')) - with self.assertLogs('coriolis.replica_cron.rpc.server', + with self.assertLogs('coriolis.transfer_cron.rpc.server', level=logging.INFO): - server._trigger_replica( + server._trigger_transfer( mock.sentinel.ctxt, mock_conductor_client, mock.sentinel.action_id, False) @ddt.ddt -class ReplicaCronServerEndpointTestCase(test_base.CoriolisBaseTestCase): - """Test suite for the Coriolis ReplicaCronServerEndpoint class.""" +class TransferCronServerEndpointTestCase(test_base.CoriolisBaseTestCase): + """Test suite for the Coriolis TransferCronServerEndpoint class.""" - @mock.patch.object(server.ReplicaCronServerEndpoint, '_init_cron') + @mock.patch.object(server.TransferCronServerEndpoint, '_init_cron') def setUp(self, _): - super(ReplicaCronServerEndpointTestCase, self).setUp() - self.server = server.ReplicaCronServerEndpoint() + super(TransferCronServerEndpointTestCase, self).setUp() + self.server = server.TransferCronServerEndpoint() @ddt.data( { @@ -75,16 +75,16 @@ def test__deserialize_schedule(self, data): result = self.server._deserialize_schedule(data['input']) self.assertEqual(result, data['expected']) - @mock.patch.object(server.ReplicaCronServerEndpoint, + @mock.patch.object(server.TransferCronServerEndpoint, '_deserialize_schedule') - @mock.patch.object(server, '_trigger_replica') + 
@mock.patch.object(server, '_trigger_transfer') @mock.patch.object(server.timeutils, 'utcnow') @mock.patch.object(server.context, 'get_admin_context') @mock.patch.object(server.cron, 'CronJob') @mock.patch.object(server.cron.Cron, 'register') def test__register_schedule(self, mock_register, mock_cron_job, mock_get_admin_context, mock_utcnow, - mock_trigger_replica, + mock_trigger_transfer, mock_deserialize_schedule): mock_get_admin_context.return_value = 'test_admin_context' mock_utcnow.return_value = datetime.datetime(2022, 1, 1) @@ -96,7 +96,7 @@ def test__register_schedule(self, mock_register, mock_cron_job, } test_schedule = { 'trust_id': 'test_schedule_trust_id', - 'replica_id': 'test_schedule_replica_id', + 'transfer_id': 'test_schedule_transfer_id', 'shutdown_instance': 'test_schedule_shutdown_instance' } @@ -108,12 +108,12 @@ def test__register_schedule(self, mock_register, mock_cron_job, mock_cron_job.assert_called_once_with( 'test_id', 'Scheduled job for test_id', 'test_schedule', True, datetime.datetime(2022, 12, 31), None, None, - mock_trigger_replica, 'test_admin_context', - self.server._rpc_client, 'test_schedule_replica_id', + mock_trigger_transfer, 'test_admin_context', + self.server._rpc_client, 'test_schedule_transfer_id', 'test_schedule_shutdown_instance') mock_register.assert_called_once() - @mock.patch.object(server.ReplicaCronServerEndpoint, + @mock.patch.object(server.TransferCronServerEndpoint, '_deserialize_schedule') @mock.patch.object(server.timeutils, 'utcnow') def test__register_schedule_expired(self, mock_utcnow, @@ -127,19 +127,19 @@ def test__register_schedule_expired(self, mock_utcnow, } test_schedule = { 'trust_id': 'test_schedule_trust_id', - 'replica_id': 'test_schedule_replica_id', + 'transfer_id': 'test_schedule_transfer_id', 'shutdown_instance': 'test_schedule_shutdown_instance' } - with self.assertLogs('coriolis.replica_cron.rpc.server', + with self.assertLogs('coriolis.transfer_cron.rpc.server', level=logging.INFO): 
self.server._register_schedule(test_schedule) mock_deserialize_schedule.assert_called_once_with(test_schedule) @mock.patch.object(server.timeutils, 'utcnow') - @mock.patch.object(server.ReplicaCronServerEndpoint, '_get_all_schedules') - @mock.patch.object(server.ReplicaCronServerEndpoint, '_register_schedule') + @mock.patch.object(server.TransferCronServerEndpoint, '_get_all_schedules') + @mock.patch.object(server.TransferCronServerEndpoint, '_register_schedule') @mock.patch.object(server.cron.Cron, 'start') def test__init_cron(self, mock_cron_start, mock_register_schedule, mock_get_all_schedules, mock_utcnow): @@ -160,8 +160,8 @@ def test__init_cron(self, mock_cron_start, mock_register_schedule, ]) mock_cron_start.assert_called_once() - @mock.patch.object(server.ReplicaCronServerEndpoint, '_get_all_schedules') - @mock.patch.object(server.ReplicaCronServerEndpoint, '_register_schedule') + @mock.patch.object(server.TransferCronServerEndpoint, '_get_all_schedules') + @mock.patch.object(server.TransferCronServerEndpoint, '_register_schedule') def test__init_cron_with_exception(self, mock_register_schedule, mock_get_all_schedules): mock_get_all_schedules.return_value = [ @@ -170,7 +170,7 @@ def test__init_cron_with_exception(self, mock_register_schedule, ] mock_register_schedule.side_effect = Exception('test_exception') - with self.assertLogs('coriolis.replica_cron.rpc.server', + with self.assertLogs('coriolis.transfer_cron.rpc.server', level=logging.ERROR): self.server._init_cron() @@ -180,16 +180,16 @@ def test__init_cron_with_exception(self, mock_register_schedule, mock.call({'id': 'schedule2'}, date=mock.ANY), ]) - @mock.patch.object(rpc_client.ConductorClient, 'get_replica_schedules') - def test__get_all_schedules(self, mock_get_replica_schedules): + @mock.patch.object(rpc_client.ConductorClient, 'get_transfer_schedules') + def test__get_all_schedules(self, mock_get_transfer_schedules): result = self.server._get_all_schedules() - 
mock_get_replica_schedules.assert_called_once_with( + mock_get_transfer_schedules.assert_called_once_with( self.server._admin_ctx, expired=False) - self.assertEqual(result, mock_get_replica_schedules.return_value) + self.assertEqual(result, mock_get_transfer_schedules.return_value) - @mock.patch.object(server.ReplicaCronServerEndpoint, '_register_schedule') + @mock.patch.object(server.TransferCronServerEndpoint, '_register_schedule') @mock.patch.object(server.timeutils, 'utcnow') def test_register(self, mock_utcnow, mock_register_schedule): mock_utcnow.return_value = datetime.datetime(2022, 1, 1) diff --git a/coriolis/tests/transfer_cron/test_api.py b/coriolis/tests/transfer_cron/test_api.py new file mode 100644 index 000000000..94e57341b --- /dev/null +++ b/coriolis/tests/transfer_cron/test_api.py @@ -0,0 +1,69 @@ +# Copyright 2024 Cloudbase Solutions Srl +# All Rights Reserved. + +from unittest import mock + +from coriolis.tests import test_base +from coriolis.transfer_cron import api as transfers_cron_module + + +class APITestCase(test_base.CoriolisBaseTestCase): + """Test suite for the Coriolis API class.""" + + def setUp(self): + super(APITestCase, self).setUp() + self.api = transfers_cron_module.API() + self.rpc_client = mock.MagicMock() + self.api._rpc_client = self.rpc_client + self.ctxt = mock.sentinel.ctxt + self.transfer_id = mock.sentinel.transfer_id + self.schedule_id = mock.sentinel.schedule_id + + def test_create(self): + schedule = mock.sentinel.schedule + enabled = mock.sentinel.enabled + exp_date = mock.sentinel.exp_date + shutdown_instance = mock.sentinel.shutdown_instance + + result = self.api.create( + self.ctxt, self.transfer_id, schedule, enabled, exp_date, + shutdown_instance) + + self.rpc_client.create_transfer_schedule.assert_called_once_with( + self.ctxt, self.transfer_id, schedule, enabled, exp_date, + shutdown_instance) + self.assertEqual(result, + self.rpc_client.create_transfer_schedule.return_value) + + def test_get_schedules(self): 
+ result = self.api.get_schedules(self.ctxt, self.transfer_id) + + self.rpc_client.get_transfer_schedules.assert_called_once_with( + self.ctxt, self.transfer_id, expired=True) + self.assertEqual(result, + self.rpc_client.get_transfer_schedules.return_value) + + def test_get_schedule(self): + result = self.api.get_schedule(self.ctxt, self.transfer_id, + self.schedule_id) + + self.rpc_client.get_transfer_schedule.assert_called_once_with( + self.ctxt, self.transfer_id, self.schedule_id, expired=True) + self.assertEqual(result, + self.rpc_client.get_transfer_schedule.return_value) + + def test_update(self): + update_values = mock.sentinel.update_values + + result = self.api.update(self.ctxt, self.transfer_id, self.schedule_id, + update_values) + + self.rpc_client.update_transfer_schedule.assert_called_once_with( + self.ctxt, self.transfer_id, self.schedule_id, update_values) + self.assertEqual(result, + self.rpc_client.update_transfer_schedule.return_value) + + def test_delete(self): + self.api.delete(self.ctxt, self.transfer_id, self.schedule_id) + self.rpc_client.delete_transfer_schedule.assert_called_once_with( + self.ctxt, self.transfer_id, self.schedule_id) diff --git a/coriolis/replica_tasks_executions/__init__.py b/coriolis/tests/transfer_tasks_executions/__init__.py similarity index 100% rename from coriolis/replica_tasks_executions/__init__.py rename to coriolis/tests/transfer_tasks_executions/__init__.py diff --git a/coriolis/tests/transfer_tasks_executions/test_api.py b/coriolis/tests/transfer_tasks_executions/test_api.py new file mode 100644 index 000000000..614dad7cc --- /dev/null +++ b/coriolis/tests/transfer_tasks_executions/test_api.py @@ -0,0 +1,67 @@ +# Copyright 2024 Cloudbase Solutions Srl +# All Rights Reserved. 
+ +from unittest import mock + +from coriolis.tests import test_base +from coriolis.transfer_tasks_executions import api as transfers_module + + +class APITestCase(test_base.CoriolisBaseTestCase): + """Test suite for the Coriolis API class.""" + + def setUp(self): + super(APITestCase, self).setUp() + self.api = transfers_module.API() + self.rpc_client = mock.MagicMock() + self.api._rpc_client = self.rpc_client + self.ctxt = mock.sentinel.ctxt + self.transfer_id = mock.sentinel.transfer_id + self.execution_id = mock.sentinel.execution_id + + def test_create(self): + shutdown_instances = mock.sentinel.shutdown_instances + + result = self.api.create(self.ctxt, self.transfer_id, + shutdown_instances) + + self.rpc_client.execute_transfer_tasks.assert_called_once_with( + self.ctxt, self.transfer_id, shutdown_instances) + self.assertEqual(result, + self.rpc_client.execute_transfer_tasks.return_value) + + def test_delete(self): + self.api.delete(self.ctxt, self.transfer_id, self.execution_id) + + (self.rpc_client.delete_transfer_tasks_execution + .assert_called_once_with( + self.ctxt, self.transfer_id, self.execution_id)) + + def test_cancel(self): + force = mock.sentinel.force + + self.api.cancel(self.ctxt, self.transfer_id, self.execution_id, force) + + (self.rpc_client.cancel_transfer_tasks_execution + .assert_called_once_with( + self.ctxt, self.transfer_id, self.execution_id, force)) + + def test_get_executions(self): + include_tasks = mock.sentinel.include_tasks + + result = self.api.get_executions(self.ctxt, self.transfer_id, + include_tasks) + + self.rpc_client.get_transfer_tasks_executions.assert_called_once_with( + self.ctxt, self.transfer_id, include_tasks) + self.assertEqual( + result, self.rpc_client.get_transfer_tasks_executions.return_value) + + def test_get_execution(self): + result = self.api.get_execution(self.ctxt, self.transfer_id, + self.execution_id) + + self.rpc_client.get_transfer_tasks_execution.assert_called_once_with( + self.ctxt, 
self.transfer_id, self.execution_id) + self.assertEqual( + result, self.rpc_client.get_transfer_tasks_execution.return_value) diff --git a/coriolis/replicas/__init__.py b/coriolis/tests/transfers/__init__.py similarity index 100% rename from coriolis/replicas/__init__.py rename to coriolis/tests/transfers/__init__.py diff --git a/coriolis/tests/replicas/test_api.py b/coriolis/tests/transfers/test_api.py similarity index 54% rename from coriolis/tests/replicas/test_api.py rename to coriolis/tests/transfers/test_api.py index 5b7445f6c..8e415dea0 100644 --- a/coriolis/tests/replicas/test_api.py +++ b/coriolis/tests/transfers/test_api.py @@ -3,8 +3,8 @@ from unittest import mock -from coriolis.replicas import api as replicas_module from coriolis.tests import test_base +from coriolis.transfers import api as transfers_module class APITestCase(test_base.CoriolisBaseTestCase): @@ -12,11 +12,11 @@ class APITestCase(test_base.CoriolisBaseTestCase): def setUp(self): super(APITestCase, self).setUp() - self.api = replicas_module.API() + self.api = transfers_module.API() self.rpc_client = mock.MagicMock() self.api._rpc_client = self.rpc_client self.ctxt = mock.sentinel.ctxt - self.replica_id = mock.sentinel.replica_id + self.transfer_id = mock.sentinel.transfer_id def test_create(self): origin_endpoint_id = mock.sentinel.origin_endpoint_id @@ -32,55 +32,57 @@ def test_create(self): storage_mappings = mock.sentinel.storage_mappings result = self.api.create( - self.ctxt, origin_endpoint_id, destination_endpoint_id, + self.ctxt, mock.sentinel.transfer_scenario, + origin_endpoint_id, destination_endpoint_id, origin_minion_pool_id, destination_minion_pool_id, instance_osmorphing_minion_pool_mappings, source_environment, destination_environment, instances, network_map, storage_mappings) - self.rpc_client.create_instances_replica.assert_called_once_with( - self.ctxt, origin_endpoint_id, destination_endpoint_id, + self.rpc_client.create_instances_transfer.assert_called_once_with( + 
self.ctxt, mock.sentinel.transfer_scenario, + origin_endpoint_id, destination_endpoint_id, origin_minion_pool_id, destination_minion_pool_id, instance_osmorphing_minion_pool_mappings, source_environment, destination_environment, instances, network_map, storage_mappings, None, None) - self.assertEqual(result, - self.rpc_client.create_instances_replica.return_value) + self.assertEqual( + result, self.rpc_client.create_instances_transfer.return_value) def test_update(self): updated_properties = mock.sentinel.updated_properties - result = self.api.update(self.ctxt, self.replica_id, + result = self.api.update(self.ctxt, self.transfer_id, updated_properties) - self.rpc_client.update_replica.assert_called_once_with( - self.ctxt, self.replica_id, updated_properties) + self.rpc_client.update_transfer.assert_called_once_with( + self.ctxt, self.transfer_id, updated_properties) self.assertEqual(result, - self.rpc_client.update_replica.return_value) + self.rpc_client.update_transfer.return_value) def test_delete(self): - self.api.delete(self.ctxt, self.replica_id) - self.rpc_client.delete_replica.assert_called_once_with( - self.ctxt, self.replica_id) + self.api.delete(self.ctxt, self.transfer_id) + self.rpc_client.delete_transfer.assert_called_once_with( + self.ctxt, self.transfer_id) - def test_get_replicas(self): - result = self.api.get_replicas( + def test_get_transfers(self): + result = self.api.get_transfers( self.ctxt, include_tasks_executions=False, include_task_info=False) - self.rpc_client.get_replicas.assert_called_once_with( + self.rpc_client.get_transfers.assert_called_once_with( self.ctxt, False, include_task_info=False) - self.assertEqual(result, self.rpc_client.get_replicas.return_value) + self.assertEqual(result, self.rpc_client.get_transfers.return_value) - def test_get_replica(self): - result = self.api.get_replica(self.ctxt, self.replica_id) + def test_get_transfer(self): + result = self.api.get_transfer(self.ctxt, self.transfer_id) - 
self.rpc_client.get_replica.assert_called_once_with( - self.ctxt, self.replica_id, include_task_info=False) - self.assertEqual(result, self.rpc_client.get_replica.return_value) + self.rpc_client.get_transfer.assert_called_once_with( + self.ctxt, self.transfer_id, include_task_info=False) + self.assertEqual(result, self.rpc_client.get_transfer.return_value) def test_delete_disks(self): - result = self.api.delete_disks(self.ctxt, self.replica_id) + result = self.api.delete_disks(self.ctxt, self.transfer_id) - self.rpc_client.delete_replica_disks.assert_called_once_with( - self.ctxt, self.replica_id) + self.rpc_client.delete_transfer_disks.assert_called_once_with( + self.ctxt, self.transfer_id) self.assertEqual(result, - self.rpc_client.delete_replica_disks.return_value) + self.rpc_client.delete_transfer_disks.return_value) diff --git a/coriolis/tests/worker/rpc/test_server.py b/coriolis/tests/worker/rpc/test_server.py index c5d304101..b38180cb1 100644 --- a/coriolis/tests/worker/rpc/test_server.py +++ b/coriolis/tests/worker/rpc/test_server.py @@ -957,7 +957,7 @@ def test_validate_endpoint_source_environment( mock_get_provider.assert_called_once_with( mock.sentinel.source_platform_name, - constants.PROVIDER_TYPE_REPLICA_EXPORT, + constants.PROVIDER_TYPE_TRANSFER_EXPORT, None, ) mock_validate.assert_called_once_with( @@ -1140,11 +1140,11 @@ def call_validate_endpoint_connection(): "connection_info_schema" ), ( - constants.PROVIDER_TYPE_REPLICA_IMPORT, + constants.PROVIDER_TYPE_TRANSFER_IMPORT, "destination_environment_schema", ), ( - constants.PROVIDER_TYPE_REPLICA_EXPORT, + constants.PROVIDER_TYPE_TRANSFER_EXPORT, "source_environment_schema" ), ( diff --git a/coriolis/tests/migrations/__init__.py b/coriolis/transfer_cron/__init__.py similarity index 100% rename from coriolis/tests/migrations/__init__.py rename to coriolis/transfer_cron/__init__.py diff --git a/coriolis/transfer_cron/api.py b/coriolis/transfer_cron/api.py new file mode 100644 index 
000000000..101c0eb48 --- /dev/null +++ b/coriolis/transfer_cron/api.py @@ -0,0 +1,31 @@ +# Copyright 2017 Cloudbase Solutions Srl +# All Rights Reserved. + +from coriolis.conductor.rpc import client as rpc_client + + +class API(object): + def __init__(self): + self._rpc_client = rpc_client.ConductorClient() + + def create(self, ctxt, transfer_id, schedule, enabled, + exp_date, shutdown_instance): + return self._rpc_client.create_transfer_schedule( + ctxt, transfer_id, schedule, enabled, exp_date, + shutdown_instance) + + def get_schedules(self, ctxt, transfer_id, expired=True): + return self._rpc_client.get_transfer_schedules( + ctxt, transfer_id, expired=expired) + + def get_schedule(self, ctxt, transfer_id, schedule_id, expired=True): + return self._rpc_client.get_transfer_schedule( + ctxt, transfer_id, schedule_id, expired=expired) + + def update(self, ctxt, transfer_id, schedule_id, update_values): + return self._rpc_client.update_transfer_schedule( + ctxt, transfer_id, schedule_id, update_values) + + def delete(self, ctxt, transfer_id, schedule_id): + self._rpc_client.delete_transfer_schedule( + ctxt, transfer_id, schedule_id) diff --git a/coriolis/tests/replica_cron/__init__.py b/coriolis/transfer_cron/rpc/__init__.py similarity index 100% rename from coriolis/tests/replica_cron/__init__.py rename to coriolis/transfer_cron/rpc/__init__.py diff --git a/coriolis/replica_cron/rpc/client.py b/coriolis/transfer_cron/rpc/client.py similarity index 75% rename from coriolis/replica_cron/rpc/client.py rename to coriolis/transfer_cron/rpc/client.py index 593796301..1483e2393 100644 --- a/coriolis/replica_cron/rpc/client.py +++ b/coriolis/transfer_cron/rpc/client.py @@ -9,11 +9,11 @@ VERSION = "1.0" -class ReplicaCronClient(rpc.BaseRPCClient): - def __init__(self, topic=constants.REPLICA_CRON_MAIN_MESSAGING_TOPIC): +class TransferCronClient(rpc.BaseRPCClient): + def __init__(self, topic=constants.TRANSFER_CRON_MAIN_MESSAGING_TOPIC): target = messaging.Target( 
topic=topic, version=VERSION) - super(ReplicaCronClient, self).__init__(target) + super(TransferCronClient, self).__init__(target) def register(self, ctxt, schedule): self._call(ctxt, 'register', schedule=schedule) diff --git a/coriolis/replica_cron/rpc/server.py b/coriolis/transfer_cron/rpc/server.py similarity index 85% rename from coriolis/replica_cron/rpc/server.py rename to coriolis/transfer_cron/rpc/server.py index 1b2d6a3a3..7ea9aab15 100644 --- a/coriolis/replica_cron/rpc/server.py +++ b/coriolis/transfer_cron/rpc/server.py @@ -17,19 +17,19 @@ VERSION = "1.0" -def _trigger_replica(ctxt, conductor_client, replica_id, shutdown_instance): +def _trigger_transfer(ctxt, conductor_client, transfer_id, shutdown_instance): try: - execution = conductor_client.execute_replica_tasks( - ctxt, replica_id, shutdown_instance) - result_msg = 'Execution %s for Replica %s' % ( + execution = conductor_client.execute_transfer_tasks( + ctxt, transfer_id, shutdown_instance) + result_msg = 'Execution %s for Transfer %s' % ( execution.get('id'), execution.get('action_id')) return result_msg - except (exception.InvalidReplicaState, + except (exception.InvalidTransferState, exception.InvalidActionTasksExecutionState): LOG.info("A replica or migration already running") -class ReplicaCronServerEndpoint(object): +class TransferCronServerEndpoint(object): def __init__(self): self._rpc_client = rpc_client.ConductorClient() @@ -61,8 +61,8 @@ def _register_schedule(self, schedule, date=None): job = cron.CronJob( sched["id"], description, sched["schedule"], sched["enabled"], sched["expiration_date"], - None, None, _trigger_replica, trust_ctxt, - self._rpc_client, schedule["replica_id"], + None, None, _trigger_transfer, trust_ctxt, + self._rpc_client, schedule["transfer_id"], schedule["shutdown_instance"]) self._cron.register(job) @@ -81,7 +81,7 @@ def _init_cron(self): self._cron.start() def _get_all_schedules(self): - schedules = self._rpc_client.get_replica_schedules( + schedules = 
self._rpc_client.get_transfer_schedules( self._admin_ctx, expired=False) return schedules diff --git a/coriolis/tests/replica_cron/rpc/__init__.py b/coriolis/transfer_tasks_executions/__init__.py similarity index 100% rename from coriolis/tests/replica_cron/rpc/__init__.py rename to coriolis/transfer_tasks_executions/__init__.py diff --git a/coriolis/transfer_tasks_executions/api.py b/coriolis/transfer_tasks_executions/api.py new file mode 100644 index 000000000..ab293e03a --- /dev/null +++ b/coriolis/transfer_tasks_executions/api.py @@ -0,0 +1,29 @@ +# Copyright 2016 Cloudbase Solutions Srl +# All Rights Reserved. + +from coriolis.conductor.rpc import client as rpc_client + + +class API(object): + def __init__(self): + self._rpc_client = rpc_client.ConductorClient() + + def create(self, ctxt, transfer_id, shutdown_instances): + return self._rpc_client.execute_transfer_tasks( + ctxt, transfer_id, shutdown_instances) + + def delete(self, ctxt, transfer_id, execution_id): + self._rpc_client.delete_transfer_tasks_execution( + ctxt, transfer_id, execution_id) + + def cancel(self, ctxt, transfer_id, execution_id, force): + self._rpc_client.cancel_transfer_tasks_execution( + ctxt, transfer_id, execution_id, force) + + def get_executions(self, ctxt, transfer_id, include_tasks=False): + return self._rpc_client.get_transfer_tasks_executions( + ctxt, transfer_id, include_tasks) + + def get_execution(self, ctxt, transfer_id, execution_id): + return self._rpc_client.get_transfer_tasks_execution( + ctxt, transfer_id, execution_id) diff --git a/coriolis/tests/replica_tasks_executions/__init__.py b/coriolis/transfers/__init__.py similarity index 100% rename from coriolis/tests/replica_tasks_executions/__init__.py rename to coriolis/transfers/__init__.py diff --git a/coriolis/transfers/api.py b/coriolis/transfers/api.py new file mode 100644 index 000000000..119e73a66 --- /dev/null +++ b/coriolis/transfers/api.py @@ -0,0 +1,43 @@ +# Copyright 2016 Cloudbase Solutions Srl +# All 
Rights Reserved. + +from coriolis.conductor.rpc import client as rpc_client + + +class API(object): + def __init__(self): + self._rpc_client = rpc_client.ConductorClient() + + def create(self, ctxt, transfer_scenario, + origin_endpoint_id, destination_endpoint_id, + origin_minion_pool_id, destination_minion_pool_id, + instance_osmorphing_minion_pool_mappings, + source_environment, destination_environment, instances, + network_map, storage_mappings, notes=None, user_scripts=None): + return self._rpc_client.create_instances_transfer( + ctxt, transfer_scenario, + origin_endpoint_id, destination_endpoint_id, + origin_minion_pool_id, destination_minion_pool_id, + instance_osmorphing_minion_pool_mappings, + source_environment, destination_environment, instances, + network_map, storage_mappings, notes, user_scripts) + + def update(self, ctxt, transfer_id, updated_properties): + return self._rpc_client.update_transfer( + ctxt, transfer_id, updated_properties) + + def delete(self, ctxt, transfer_id): + self._rpc_client.delete_transfer(ctxt, transfer_id) + + def get_transfers(self, ctxt, include_tasks_executions=False, + include_task_info=False): + return self._rpc_client.get_transfers( + ctxt, include_tasks_executions, + include_task_info=include_task_info) + + def get_transfer(self, ctxt, transfer_id, include_task_info=False): + return self._rpc_client.get_transfer( + ctxt, transfer_id, include_task_info=include_task_info) + + def delete_disks(self, ctxt, transfer_id): + return self._rpc_client.delete_transfer_disks(ctxt, transfer_id) diff --git a/coriolis/worker/rpc/server.py b/coriolis/worker/rpc/server.py index e96ace881..c463239a8 100644 --- a/coriolis/worker/rpc/server.py +++ b/coriolis/worker/rpc/server.py @@ -513,7 +513,7 @@ def validate_endpoint_target_environment( def validate_endpoint_source_environment( self, ctxt, platform_name, source_env): provider = providers_factory.get_provider( - platform_name, constants.PROVIDER_TYPE_REPLICA_EXPORT, None) + 
platform_name, constants.PROVIDER_TYPE_TRANSFER_EXPORT, None) source_env_schema = provider.get_source_environment_schema() is_valid = True @@ -608,11 +608,11 @@ def get_provider_schemas(self, ctxt, platform_name, provider_type): schema = provider.get_connection_info_schema() schemas["connection_info_schema"] = schema - if provider_type == constants.PROVIDER_TYPE_REPLICA_IMPORT: + if provider_type == constants.PROVIDER_TYPE_TRANSFER_IMPORT: schema = provider.get_target_environment_schema() schemas["destination_environment_schema"] = schema - if provider_type == constants.PROVIDER_TYPE_REPLICA_EXPORT: + if provider_type == constants.PROVIDER_TYPE_TRANSFER_EXPORT: schema = provider.get_source_environment_schema() schemas["source_environment_schema"] = schema diff --git a/etc/coriolis/policy.yaml b/etc/coriolis/policy.yaml index 0c6654c34..7622de595 100644 --- a/etc/coriolis/policy.yaml +++ b/etc/coriolis/policy.yaml @@ -22,6 +22,13 @@ "migration:migrations:cancel": "rule:admin_or_owner" "migration:migrations:delete": "rule:admin_or_owner" +"migration:deployments:create": "rule:admin_or_owner" +"migration:deployments:list": "rule:admin_or_owner" +"migration:deployments:show": "rule:admin_or_owner" +"migration:deployments:show_execution": "rule:admin_or_owner" +"migration:deployments:cancel": "rule:admin_or_owner" +"migration:deployments:delete": "rule:admin_or_owner" + "migration:replicas:create": "rule:admin_or_owner" "migration:replicas:list": "rule:admin_or_owner" "migration:replicas:show": "rule:admin_or_owner" diff --git a/requirements.txt b/requirements.txt index 0efdd3b92..bb2b6949f 100644 --- a/requirements.txt +++ b/requirements.txt @@ -8,6 +8,7 @@ jsonschema # so we limit its version here. 
kombu==4.6.10 PyMySQL +netifaces oslo.cache oslo.concurrency oslo.config diff --git a/setup.cfg b/setup.cfg index d1d76d9ad..700f776c2 100644 --- a/setup.cfg +++ b/setup.cfg @@ -28,7 +28,7 @@ console_scripts = coriolis-api = coriolis.cmd.api:main coriolis-conductor = coriolis.cmd.conductor:main coriolis-worker = coriolis.cmd.worker:main - coriolis-replica-cron = coriolis.cmd.replica_cron:main + coriolis-transfer-cron = coriolis.cmd.transfer_cron:main coriolis-scheduler= coriolis.cmd.scheduler:main coriolis-minion-manager= coriolis.cmd.minion_manager:main coriolis-dbsync = coriolis.cmd.db_sync:main diff --git a/tox.ini b/tox.ini index e4c89e17f..432a134f1 100644 --- a/tox.ini +++ b/tox.ini @@ -36,5 +36,5 @@ omit = coriolis/tests/* # E125 is deliberately excluded. See https://github.com/jcrocholl/pep8/issues/126 # E251 Skipped due to https://github.com/jcrocholl/pep8/issues/301 -ignore = E125,E251,W503,W504,E305,E731,E117,W605,F632,H401,H403,H404,H405 +ignore = E125,E251,W503,W504,E305,E731,E117,W605,F632,H401,H403,H404,H405,H202 exclude = .venv,.git,.tox,dist,doc,*openstack/common*,*lib/python*,*egg,build,tools