diff --git a/.vscode/settings.json b/.vscode/settings.json new file mode 100644 index 00000000..b01d248f --- /dev/null +++ b/.vscode/settings.json @@ -0,0 +1,17 @@ +{ + "python.testing.pytestArgs": [ + "tests" + ], + "python.testing.unittestEnabled": false, + "python.testing.pytestEnabled": true, + "python.linting.pylintEnabled": false, + "python.linting.prospectorEnabled": true, + "python.linting.prospectorArgs": [ + "-t", "dodgy", + "-t", "mccabe", + "-t", "profile-validator", + "-t", "pyflakes", + "-t", "pylint" + ], + "python.linting.enabled": true +} diff --git a/databricks_cli/unity_catalog/cred_cli.py b/databricks_cli/unity_catalog/cred_cli.py index 5cb9544c..1490a515 100644 --- a/databricks_cli/unity_catalog/cred_cli.py +++ b/databricks_cli/unity_catalog/cred_cli.py @@ -21,6 +21,8 @@ # See the License for the specific language governing permissions and # limitations under the License. +import functools + import click from databricks_cli.click_types import JsonClickType @@ -33,9 +35,91 @@ ############# Storage Credential Commands ############ +def fill_credential( + data, aws_iam_role_arn, az_sp_directory_id, az_sp_application_id, + az_sp_client_secret, az_mi_access_connector_id, az_mi_id, gcp_sak_email, + gcp_sak_private_key_id, gcp_sak_private_key): + if aws_iam_role_arn is not None: + data['aws_iam_role'] = { + 'role_arn': aws_iam_role_arn + } + + if ((az_sp_directory_id is not None) or (az_sp_application_id is not None) or + (az_sp_client_secret is not None)): + data['azure_service_principal'] = { + 'directory_id': az_sp_directory_id, + 'application_id': az_sp_application_id, + 'client_secret': az_sp_client_secret + } + + if (az_mi_access_connector_id is not None) or (az_mi_id is not None): + data['azure_managed_identity'] = { + 'access_connector_id': az_mi_access_connector_id, + 'managed_identity_id': az_mi_id + } + + if ((gcp_sak_email is not None) or (gcp_sak_private_key_id is not None) or + (gcp_sak_private_key is not None)): + data['gcp_service_account_key'] = { + 'email': gcp_sak_email, + 'private_key_id': gcp_sak_private_key_id, + 'private_key': gcp_sak_private_key + } + + +def create_update_common_options(f): + @click.option('--aws-iam-role-arn', default=None, + help='The Amazon Resource Name (ARN) of the AWS IAM role for S3 data access.') + @click.option('--az-sp-directory-id', default=None, + help=( + 'The directory ID corresponding to the Azure Active Directory (AAD) ' + 'tenant of the application.')) + @click.option('--az-sp-application-id', default=None, + help=( + 'The application ID of the application registration within the referenced ' + 'AAD tenant.')) + @click.option('--az-sp-client-secret', default=None, + help='The client secret generated for the above app ID in AAD.') + @click.option('--az-mi-access-connector-id', default=None, + help=( + 'The Azure resource ID of the Azure Databricks Access Connector. ' + 'Use the format, ' + '/subscriptions/{guid}/resourceGroups/{rg-name}/providers/Microsoft.Databricks' + '/accessConnectors/{connector-name} .')) + @click.option('--az-mi-id', default=None, + help=( + 'The Azure resource ID of the managed identity. Use the format, ' + '/subscriptions/{guid}/resourceGroups/{rg-name}/providers' + '/Microsoft.ManagedIdentity/userAssignedIdentities/{identity-name} .' + 'This is only available for user-assigned identities. ' + 'For system-assigned identities, access-connector-id is used to identify ' + 'the identity. 
If this flag is not provided, ' + 'then we assume that it is using the system-assigned identity.')) + @click.option('--gcp-sak-email', default=None, + help=( + 'Credential for GCP Service Account Key. ' + 'The email of the service account.')) + @click.option('--gcp-sak-private-key-id', default=None, + help=( + 'Credential for GCP Service Account Key. ' + 'The ID of the service account\'s private key.')) + @click.option('--gcp-sak-private-key', default=None, + help=( + 'Credential for GCP Service Account Key. ' + 'The service account\'s RSA private key.')) + @click.option('--comment', default=None, + help='Free-form text description.') + @functools.wraps(f) + def wrapper(*args, **kwargs): + f(*args, **kwargs) + return wrapper + @click.command(context_settings=CONTEXT_SETTINGS, short_help='Create storage credential.') +@click.option('--name', default=None, + help='Name of new storage credential') +@create_update_common_options @click.option('--skip-validation', '-s', 'skip_val', is_flag=True, default=False, help='Skip the validation of new credential info before creation') @click.option('--json-file', default=None, type=click.Path(), @@ -50,16 +134,42 @@ # Until that is fixed (should return a 400), show full error trace. #@eat_exceptions @provide_api_client -def create_credential_cli(api_client, skip_val, json_file, json): +def create_credential_cli(api_client, name, aws_iam_role_arn, + az_sp_directory_id, az_sp_application_id, az_sp_client_secret, + az_mi_access_connector_id, az_mi_id, gcp_sak_email, + gcp_sak_private_key_id, gcp_sak_private_key, comment, + skip_val, json_file, json): """ Create new storage credential. The public specification for the JSON request is in development. """ - json_cli_base(json_file, json, - lambda json: UnityCatalogApi(api_client).create_storage_credential(json, - skip_val), - encode_utf8=True) + has_credential_flag = ( + (aws_iam_role_arn is not None) or + (az_sp_directory_id is not None) or (az_sp_application_id is not None) or + (az_sp_client_secret is not None) or (az_mi_access_connector_id is not None) or + (az_mi_id is not None) or (gcp_sak_email is not None) or + (gcp_sak_private_key_id is not None) or (gcp_sak_private_key is not None)) + if ((name is not None) or has_credential_flag or (comment is not None)): + if (json_file is not None) or (json is not None): + raise ValueError('Cannot specify JSON if any other creation flags are specified') + data = { + 'name': name, + 'comment': comment + } + + fill_credential( + data, aws_iam_role_arn, az_sp_directory_id, az_sp_application_id, + az_sp_client_secret, az_mi_access_connector_id, az_mi_id, gcp_sak_email, + gcp_sak_private_key_id, gcp_sak_private_key) + + cred_json = UnityCatalogApi(api_client).create_storage_credential(data, skip_val) + click.echo(mc_pretty_format(cred_json)) + else: + json_cli_base(json_file, json, + lambda json: UnityCatalogApi(api_client).create_storage_credential(json, + skip_val), + encode_utf8=True) @click.command(context_settings=CONTEXT_SETTINGS, @@ -98,6 +208,10 @@ def get_credential_cli(api_client, name): short_help='Update a storage credential.') @click.option('--name', required=True, help='Name of the storage credential to update.') +@click.option('--new-name', default=None, help='New name of the storage credential.') +@create_update_common_options +@click.option('--owner', default=None, + help='Owner of the storage credential.') @click.option('--skip-validation', '-s', 'skip_val', is_flag=True, default=False, help='Skip the validation of new credential info before update') 
@click.option('--json-file', default=None, type=click.Path(), @@ -109,17 +223,45 @@ def get_credential_cli(api_client, name): # See comment for create-storage-credential #@eat_exceptions @provide_api_client -def update_credential_cli(api_client, name, skip_val, json_file, json): +def update_credential_cli(api_client, name, new_name, aws_iam_role_arn, + az_sp_directory_id, az_sp_application_id, az_sp_client_secret, + az_mi_access_connector_id, az_mi_id, gcp_sak_email, + gcp_sak_private_key_id, gcp_sak_private_key, comment, owner, + skip_val, json_file, json): """ Update a storage credential. The public specification for the JSON request is in development. """ - json_cli_base(json_file, json, - lambda json: UnityCatalogApi(api_client).update_storage_credential(name, - json, - skip_val), - encode_utf8=True) + has_credential_flag = ( + (aws_iam_role_arn is not None) or + (az_sp_directory_id is not None) or (az_sp_application_id is not None) or + (az_sp_client_secret is not None) or (az_mi_access_connector_id is not None) or + (az_mi_id is not None) or (gcp_sak_email is not None) or + (gcp_sak_private_key_id is not None) or (gcp_sak_private_key is not None)) + if ((new_name is not None) or has_credential_flag or + (comment is not None) or (owner is not None)): + if (json_file is not None) or (json is not None): + raise ValueError('Cannot specify JSON if any other update flags are specified') + data = { + 'name': new_name, + 'comment': comment, + 'owner': owner + } + + fill_credential( + data, aws_iam_role_arn, az_sp_directory_id, az_sp_application_id, + az_sp_client_secret, az_mi_access_connector_id, az_mi_id, gcp_sak_email, + gcp_sak_private_key_id, gcp_sak_private_key) + + cred_json = UnityCatalogApi(api_client).update_storage_credential(name, data, skip_val) + click.echo(mc_pretty_format(cred_json)) + else: + json_cli_base(json_file, json, + lambda json: UnityCatalogApi(api_client).update_storage_credential(name, + json, + skip_val), + encode_utf8=True) @click.command(context_settings=CONTEXT_SETTINGS, diff --git a/databricks_cli/unity_catalog/ext_loc_cli.py b/databricks_cli/unity_catalog/ext_loc_cli.py index 6649bac5..aeba2be2 100644 --- a/databricks_cli/unity_catalog/ext_loc_cli.py +++ b/databricks_cli/unity_catalog/ext_loc_cli.py @@ -21,6 +21,8 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+import functools + import click from databricks_cli.click_types import JsonClickType @@ -31,14 +33,26 @@ from databricks_cli.utils import eat_exceptions, CONTEXT_SETTINGS, json_cli_base +def create_update_common_options(f): + @click.option('--url', default=None, + help='Path URL for the new external location') + @click.option('--storage-credential-name', default=None, + help='Name of storage credential to use with new external location') + @click.option('--read-only/--no-read-only', is_flag=True, default=None, + help='Whether the external location is read-only') + @click.option('--comment', default=None, + help='Free-form text description.') + @functools.wraps(f) + def wrapper(*args, **kwargs): + f(*args, **kwargs) + return wrapper + + @click.command(context_settings=CONTEXT_SETTINGS, short_help='Create External Location.') @click.option('--name', default=None, help='Name of new external location') -@click.option('--url', default=None, - help='Path URL for the new external location') -@click.option('--storage-credential-name', default=None, - help='Name of storage credential to use with new external location') +@create_update_common_options @click.option('--skip-validation', '-s', 'skip_val', is_flag=True, default=False, help='Skip the validation of location\'s storage credential before creation') @click.option('--json-file', default=None, type=click.Path(), @@ -53,16 +67,24 @@ # Until that is fixed (should return a 400), show full error trace. #@eat_exceptions @provide_api_client -def create_location_cli(api_client, name, url, storage_credential_name, skip_val, json_file, json): +def create_location_cli(api_client, name, url, storage_credential_name, + read_only, comment, skip_val, json_file, json): """ Create new external location. The public specification for the JSON request is in development. 
""" - if (name is not None) and (url is not None) and (storage_credential_name is not None): + if ((name is not None) or (url is not None) or (storage_credential_name is not None) or + (read_only is not None) or (comment is not None)): if (json_file is not None) or (json is not None): - raise ValueError('Cannot specify JSON if both name and url are given') - data = {"name": name, "url": url, "credential_name": storage_credential_name} + raise ValueError('Cannot specify JSON if any other creation flags are specified') + data = { + 'name': name, + 'url': url, + 'credential_name': storage_credential_name, + 'read_only': read_only, + 'comment': comment + } loc_json = UnityCatalogApi(api_client).create_external_location(data, skip_val) click.echo(mc_pretty_format(loc_json)) elif (json is None) and (json_file is None): @@ -109,10 +131,14 @@ def get_location_cli(api_client, name): short_help='Update an external location.') @click.option('--name', required=True, help='Name of the external location to update.') +@click.option('--new-name', default=None, help='New name of the external location.') +@create_update_common_options +@click.option('--owner', default=None, + help='Owner of the external location.') @click.option('--force', '-f', is_flag=True, default=False, help='Force update even if location has dependent tables/mounts') @click.option('--skip-validation', '-s', 'skip_val', is_flag=True, default=False, - help='Skip the validation of location\'s storage credential before creation') + help='Skip the validation of location\'s storage credential') @click.option('--json-file', default=None, type=click.Path(), help=json_file_help(method='PATCH', path='/external-locations/{name}')) @click.option('--json', default=None, type=JsonClickType(), @@ -122,17 +148,34 @@ def get_location_cli(api_client, name): # See comment for create_location_cli #@eat_exceptions @provide_api_client -def update_location_cli(api_client, name, force, skip_val, json_file, json): +def update_location_cli(api_client, name, new_name, url, storage_credential_name, read_only, + comment, owner, force, skip_val, json_file, json): """ Update an external location. The public specification for the JSON request is in development. 
""" - json_cli_base(json_file, json, - lambda json: UnityCatalogApi(api_client).update_external_location(name, json, - force, - skip_val), - encode_utf8=True) + if ((new_name is not None) or (storage_credential_name is not None) or + (read_only is not None) or (comment is not None)): + if (json_file is not None) or (json is not None): + raise ValueError('Cannot specify JSON if any other update flags are specified') + data = { + 'name': new_name, + 'url': url, + 'credential_name': storage_credential_name, + 'read_only': read_only, + 'comment': comment, + 'owner': owner + } + loc_json = UnityCatalogApi(api_client).update_external_location( + name, data, force, skip_val) + click.echo(mc_pretty_format(loc_json)) + else: + json_cli_base(json_file, json, + lambda json: UnityCatalogApi(api_client).update_external_location(name, json, + force, + skip_val), + encode_utf8=True) @click.command(context_settings=CONTEXT_SETTINGS, @@ -163,17 +206,30 @@ def delete_location_cli(api_client, name, force): @click.option('--cred-aws-iam-role', default=None, help='An aws role to validate') @click.option('--cred-az-directory-id', default=None, - help='An Azure directory id to validate') + help='An Azure Service Principal directory id to validate') @click.option('--cred-az-application-id', default=None, - help='An Azure application id to validate') + help='An Azure Service Principal application id to validate') @click.option('--cred-az-client-secret', default=None, - help='An Azure directory id to validate') + help='An Azure Service Principal directory id to validate') +@click.option('--cred-az-mi-access-connector-id', default=None, + help='An Azure Managed Identity access connector id to validate') +@click.option('--cred-az-mi-id', default=None, + help='An Azure Managed Identity id to validate') +@click.option('--cred-gcp-sak-email', default=None, + help='A GCP Service Account Key email to validate') +@click.option('--cred-gcp-sak-private-key-id', default=None, + help='A GCP Service Account Key private key ID to validate') +@click.option('--cred-gcp-sak-private-key', default=None, + help='A GCP Service Account Key private key to validate') @debug_option @profile_option @eat_exceptions @provide_api_client def validate_location_cli(api_client, name, url, cred_name, cred_aws_iam_role, cred_az_directory_id, - cred_az_application_id, cred_az_client_secret): + cred_az_application_id, cred_az_client_secret, + cred_az_mi_access_connector_id, + cred_az_mi_id, cred_gcp_sak_email, cred_gcp_sak_private_key_id, + cred_gcp_sak_private_key): """ Validate an external location/credential combination. @@ -188,21 +244,37 @@ def validate_location_cli(api_client, name, url, cred_name, cred_aws_iam_role, c provided. 
""" validation_spec = { - "external_location_name": name, - "url": url, - "storage_credential_name": cred_name, + 'external_location_name': name, + 'url': url, + 'storage_credential_name': cred_name, } if cred_aws_iam_role is not None: - validation_spec["aws_iam_role"] = { - "role_arn": cred_aws_iam_role + validation_spec['aws_iam_role'] = { + 'role_arn': cred_aws_iam_role + } + + if ((cred_az_directory_id is not None) or (cred_az_application_id is not None) or + (cred_az_client_secret is not None)): + validation_spec['azure_service_principal'] = { + 'directory_id': cred_az_directory_id, + 'application_id': cred_az_application_id, + 'client_secret': cred_az_client_secret + } + + if (cred_az_mi_access_connector_id is not None) or (cred_az_mi_id is not None): + validation_spec['azure_managed_identity'] = { + 'access_connector_id': cred_az_mi_access_connector_id, + 'managed_identity_id': cred_az_mi_id } - if cred_az_directory_id is not None: - validation_spec["azure_service_principal"] = { - "directory_id": cred_az_directory_id, - "application_id": cred_az_application_id, - "client_secret": cred_az_client_secret + if ((cred_gcp_sak_email is not None) or (cred_gcp_sak_private_key_id is not None) or + (cred_gcp_sak_private_key is not None)): + validation_spec['gcp_service_account_key'] = { + 'email': cred_gcp_sak_email, + 'private_key_id': cred_gcp_sak_private_key_id, + 'private_key': cred_gcp_sak_private_key } + del_none(validation_spec) validation_json = UnityCatalogApi(api_client).validate_external_location(validation_spec) click.echo(mc_pretty_format(validation_json)) diff --git a/databricks_cli/unity_catalog/metastore_cli.py b/databricks_cli/unity_catalog/metastore_cli.py index 784ac201..d5869dd1 100644 --- a/databricks_cli/unity_catalog/metastore_cli.py +++ b/databricks_cli/unity_catalog/metastore_cli.py @@ -88,6 +88,21 @@ def get_metastore_cli(api_client, metastore_id): short_help='Update a metastore.') @click.option('--id', 'metastore_id', required=True, type=MetastoreIdClickType(), help='Unique identifier of the metastore to update.') +@click.option('--new-name', default=None, help='New name of the metastore.') +@click.option('--storage-root-credential-id', default=None, + help='Storage Credential ID to access storage root.') +@click.option('--delta-sharing-scope', default=None, + help='Delta sharing scope. Can be INTERNAL or INTERNAL_AND_EXTERNAL.') +@click.option('--delta-sharing-recipient-token-lifetime-in-seconds', default=None, type=int, + help=( + 'The token lifetime determines how long a generated Delta Sharing token is ' + 'valid for. 0 for no expiration.')) +@click.option('--delta-sharing-organization-name', default=None, + help=( + 'The organization name of a Delta Sharing entity. ' + 'The name will be used in Databricks-to-Databricks Delta Sharing as the official ' + 'name.')) +@click.option('--owner', default=None, help='Owner of the metastore.') @click.option('--json-file', default=None, type=click.Path(), help=json_file_help(method='PATCH', path='/metastores/{id}')) @click.option('--json', default=None, type=JsonClickType(), @@ -96,13 +111,36 @@ def get_metastore_cli(api_client, metastore_id): @profile_option @eat_exceptions @provide_api_client -def update_metastore_cli(api_client, metastore_id, json_file, json): +def update_metastore_cli(api_client, metastore_id, new_name, + storage_root_credential_id, + delta_sharing_scope, delta_sharing_recipient_token_lifetime_in_seconds, + delta_sharing_organization_name, owner, json_file, json): """ Update a metastore. 
The public specification for the JSON request is in development. """ - json_cli_base(json_file, json, + has_delta_sharing_flag = ( + (delta_sharing_scope is not None) or + (delta_sharing_recipient_token_lifetime_in_seconds is not None) or + (delta_sharing_organization_name is not None)) + if ((new_name is not None) or + (storage_root_credential_id is not None) or has_delta_sharing_flag or (owner is not None)): + if (json_file is not None) or (json is not None): + raise ValueError('Cannot specify JSON if any other update flags are specified') + data = { + 'name': new_name, + 'storage_root_credential_id': storage_root_credential_id, + 'delta_sharing_scope': delta_sharing_scope, + 'delta_sharing_recipient_token_lifetime_in_seconds': + delta_sharing_recipient_token_lifetime_in_seconds, + 'delta_sharing_organization_name': delta_sharing_organization_name, + 'owner': owner + } + metastore_json = UnityCatalogApi(api_client).update_metastore(metastore_id, data) + click.echo(mc_pretty_format(metastore_json)) + else: + json_cli_base(json_file, json, lambda json: UnityCatalogApi(api_client).update_metastore(metastore_id, json)) diff --git a/prospector.yaml b/prospector.yaml index 5576da5b..4768ea02 100644 --- a/prospector.yaml +++ b/prospector.yaml @@ -33,6 +33,7 @@ pylint: - redundant-u-string-prefix - consider-using-from-import - missing-timeout + - too-many-locals mccabe: disable: diff --git a/tests/unity_catalog/test_cred_cli.py b/tests/unity_catalog/test_cred_cli.py new file mode 100644 index 00000000..b06bcf62 --- /dev/null +++ b/tests/unity_catalog/test_cred_cli.py @@ -0,0 +1,217 @@ +# Databricks CLI +# Copyright 2017 Databricks, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"), except +# that the use of services to which certain application programming +# interfaces (each, an "API") connect requires that the user first obtain +# a license for the use of the APIs from Databricks, Inc. ("Databricks"), +# by creating an account at www.databricks.com and agreeing to either (a) +# the Community Edition Terms of Service, (b) the Databricks Terms of +# Service, or (c) another written agreement between Licensee and Databricks +# for the use of the APIs. +# +# You may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +# pylint:disable=redefined-outer-name + +import mock +import pytest +from click.testing import CliRunner +from databricks_cli.unity_catalog.utils import mc_pretty_format + +from databricks_cli.unity_catalog import cred_cli +from tests.utils import provide_conf + +STORAGE_CREDENTIAL_NAME = 'test_storage_credential_name' +AWS_IAM_ROLE_ARN = 'test_aws_iam_role_arn' +AZ_SP_DIRECTORY_ID = 'test_az_sp_directory_id' +AZ_SP_APPLICATION_ID = 'test_az_sp_application_id' +AZ_SP_CLIENT_SEC = 'test_az_sp_client_secret' # Named as such to suppress dodgy lint warnings +AZ_MI_ACCESS_CONNECTOR_ID = 'test_mi_access_connector_id' +AZ_MI_ID = 'test_mi_mid' +GCP_SAK_EMAIL = 'test_sak_email' +GCP_SAK_PRIVATE_KEY_ID = 'test_sak_private_key_id' +GCP_SAK_PRIVATE_KEY = 'test_sak_private_key' +COMMENT = 'some_comment' +STORAGE_CREDENTIALS = { + 'storage_credentials': [ + { + 'name': STORAGE_CREDENTIAL_NAME + } + ] +} +STORAGE_CREDENTIAL = { + 'name': STORAGE_CREDENTIAL_NAME, + 'aws_iam_role': { + 'role_arn': AWS_IAM_ROLE_ARN + }, + 'azure_service_principal': { + 'directory_id': AZ_SP_DIRECTORY_ID, + 'application_id': AZ_SP_APPLICATION_ID, + 'client_secret': AZ_SP_CLIENT_SEC + }, + 'azure_managed_identity': { + 'access_connector_id': AZ_MI_ACCESS_CONNECTOR_ID, + 'managed_identity_id': AZ_MI_ID + }, + 'gcp_service_account_key': { + 'email': GCP_SAK_EMAIL, + 'private_key_id': GCP_SAK_PRIVATE_KEY_ID, + 'private_key': GCP_SAK_PRIVATE_KEY + }, + 'comment': COMMENT +} + + +@pytest.fixture() +def api_mock(): + with mock.patch( + 'databricks_cli.unity_catalog.cred_cli.UnityCatalogApi') as uc_api_mock: + _cred_api_mock = mock.MagicMock() + uc_api_mock.return_value = _cred_api_mock + yield _cred_api_mock + + +@pytest.fixture() +def echo_mock(): + with mock.patch('databricks_cli.unity_catalog.cred_cli.click.echo') as echo_mock: + yield echo_mock + + +@provide_conf +def test_create_credential_cli(api_mock, echo_mock): + api_mock.create_storage_credential.return_value = STORAGE_CREDENTIAL + runner = CliRunner() + runner.invoke( + cred_cli.create_credential_cli, + args=[ + '--name', STORAGE_CREDENTIAL_NAME, + '--aws-iam-role-arn', AWS_IAM_ROLE_ARN, + '--az-sp-directory-id', AZ_SP_DIRECTORY_ID, + '--az-sp-application-id', AZ_SP_APPLICATION_ID, + '--az-sp-client-secret', AZ_SP_CLIENT_SEC, + '--az-mi-access-connector-id', AZ_MI_ACCESS_CONNECTOR_ID, + '--az-mi-id', AZ_MI_ID, + '--gcp-sak-email', GCP_SAK_EMAIL, + '--gcp-sak-private-key-id', GCP_SAK_PRIVATE_KEY_ID, + '--gcp-sak-private-key', GCP_SAK_PRIVATE_KEY, + '--comment', COMMENT, + '--skip-validation' + ]) + api_mock.create_storage_credential.assert_called_once_with(STORAGE_CREDENTIAL, True) + echo_mock.assert_called_once_with(mc_pretty_format(STORAGE_CREDENTIAL)) + + +@provide_conf +def test_create_credential_cli_with_json(api_mock, echo_mock): + api_mock.create_storage_credential.return_value = STORAGE_CREDENTIAL + runner = CliRunner() + runner.invoke( + cred_cli.create_credential_cli, + args=[ + '--json', '{ "name": "test_credential_name" }' + ]) + api_mock.create_storage_credential.assert_called_once_with( + { + 'name': 'test_credential_name' + }, + False) + echo_mock.assert_called_once_with(mc_pretty_format(STORAGE_CREDENTIAL)) + + +@provide_conf +def test_list_credentials_cli(api_mock, echo_mock): + api_mock.list_storage_credentials.return_value = STORAGE_CREDENTIALS + runner = CliRunner() + runner.invoke(cred_cli.list_credentials_cli) + api_mock.list_storage_credentials.assert_called_once() + echo_mock.assert_called_once_with(mc_pretty_format(STORAGE_CREDENTIALS)) + + 
+@provide_conf +def test_get_credential_cli(api_mock, echo_mock): + api_mock.get_storage_credential.return_value = STORAGE_CREDENTIAL + runner = CliRunner() + runner.invoke( + cred_cli.get_credential_cli, + args=['--name', STORAGE_CREDENTIAL_NAME]) + api_mock.get_storage_credential.assert_called_once_with(STORAGE_CREDENTIAL_NAME) + echo_mock.assert_called_once_with(mc_pretty_format(STORAGE_CREDENTIAL)) + + +@provide_conf +def test_update_credential_cli(api_mock, echo_mock): + api_mock.update_storage_credential.return_value = STORAGE_CREDENTIAL + runner = CliRunner() + runner.invoke( + cred_cli.update_credential_cli, + args=[ + '--name', STORAGE_CREDENTIAL_NAME, + '--new-name', 'new_credential_name', + '--aws-iam-role-arn', AWS_IAM_ROLE_ARN, + '--az-sp-directory-id', AZ_SP_DIRECTORY_ID, + '--az-sp-application-id', AZ_SP_APPLICATION_ID, + '--az-sp-client-secret', AZ_SP_CLIENT_SEC, + '--az-mi-access-connector-id', AZ_MI_ACCESS_CONNECTOR_ID, + '--az-mi-id', AZ_MI_ID, + '--gcp-sak-email', GCP_SAK_EMAIL, + '--gcp-sak-private-key-id', GCP_SAK_PRIVATE_KEY_ID, + '--gcp-sak-private-key', GCP_SAK_PRIVATE_KEY, + '--comment', COMMENT, + '--owner', 'owner', + '--skip-validation' + ]) + expected_data = { + 'name': 'new_credential_name', + 'aws_iam_role': { + 'role_arn': AWS_IAM_ROLE_ARN + }, + 'azure_service_principal': { + 'directory_id': AZ_SP_DIRECTORY_ID, + 'application_id': AZ_SP_APPLICATION_ID, + 'client_secret': AZ_SP_CLIENT_SEC + }, + 'azure_managed_identity': { + 'access_connector_id': AZ_MI_ACCESS_CONNECTOR_ID, + 'managed_identity_id': AZ_MI_ID + }, + 'gcp_service_account_key': { + 'email': GCP_SAK_EMAIL, + 'private_key_id': GCP_SAK_PRIVATE_KEY_ID, + 'private_key': GCP_SAK_PRIVATE_KEY + }, + 'comment': COMMENT, + 'owner': 'owner' + } + api_mock.update_storage_credential.assert_called_once_with( + STORAGE_CREDENTIAL_NAME, expected_data, True) + echo_mock.assert_called_once_with(mc_pretty_format(STORAGE_CREDENTIAL)) + + +@provide_conf +def test_update_credential_cli_with_json(api_mock, echo_mock): + api_mock.update_storage_credential.return_value = STORAGE_CREDENTIAL + runner = CliRunner() + runner.invoke( + cred_cli.update_credential_cli, + args=[ + '--name', STORAGE_CREDENTIAL_NAME, + '--json', '{ "name": "new_credential_name" }' + ]) + api_mock.update_storage_credential.assert_called_once_with( + STORAGE_CREDENTIAL_NAME, + { + 'name': 'new_credential_name' + }, + False) + echo_mock.assert_called_once_with(mc_pretty_format(STORAGE_CREDENTIAL)) + \ No newline at end of file diff --git a/tests/unity_catalog/test_delta_sharing_cli.py b/tests/unity_catalog/test_delta_sharing_cli.py index 8b89a8f3..679f361e 100644 --- a/tests/unity_catalog/test_delta_sharing_cli.py +++ b/tests/unity_catalog/test_delta_sharing_cli.py @@ -31,8 +31,6 @@ from databricks_cli.unity_catalog import delta_sharing_cli from tests.utils import provide_conf -ECHO_MODULE_NAME = 'databricks_cli.unity_catalog.delta_sharing_cli.click.echo' - SHARE_NAME = 'test_share' SHARES = { 'shares': [ diff --git a/tests/unity_catalog/test_ext_loc_cli.py b/tests/unity_catalog/test_ext_loc_cli.py new file mode 100644 index 00000000..fcc257ec --- /dev/null +++ b/tests/unity_catalog/test_ext_loc_cli.py @@ -0,0 +1,253 @@ +# Databricks CLI +# Copyright 2017 Databricks, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"), except +# that the use of services to which certain application programming +# interfaces (each, an "API") connect requires that the user first obtain +# a license for the use of the APIs from Databricks, Inc. 
("Databricks"), +# by creating an account at www.databricks.com and agreeing to either (a) +# the Community Edition Terms of Service, (b) the Databricks Terms of +# Service, or (c) another written agreement between Licensee and Databricks +# for the use of the APIs. +# +# You may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# pylint:disable=redefined-outer-name + +import mock +import pytest +from click.testing import CliRunner +from databricks_cli.unity_catalog.utils import mc_pretty_format + +from databricks_cli.unity_catalog import ext_loc_cli +from tests.utils import provide_conf + +EXTERNAL_LOCATION_NAME = 'test_external_location_name' +URL = 'some_url' +CREDENTIAL_NAME = 'some_storage_credential_name' +COMMENT = 'some_comment' +EXTERNAL_LOCATIONS = { + 'external_locations': [ + { + 'name': EXTERNAL_LOCATION_NAME + } + ] +} +EXTERNAL_LOCATION = { + 'name': EXTERNAL_LOCATION_NAME, + 'url': URL, + 'credential_name': CREDENTIAL_NAME, + 'read_only': True, + 'comment': COMMENT +} + + +@pytest.fixture() +def api_mock(): + with mock.patch( + 'databricks_cli.unity_catalog.ext_loc_cli.UnityCatalogApi') as uc_api_mock: + _ext_loc_api_mock = mock.MagicMock() + uc_api_mock.return_value = _ext_loc_api_mock + yield _ext_loc_api_mock + + +@pytest.fixture() +def echo_mock(): + with mock.patch('databricks_cli.unity_catalog.ext_loc_cli.click.echo') as echo_mock: + yield echo_mock + + +@provide_conf +def test_create_location_cli(api_mock, echo_mock): + api_mock.create_external_location.return_value = EXTERNAL_LOCATION + runner = CliRunner() + runner.invoke( + ext_loc_cli.create_location_cli, + args=[ + '--name', EXTERNAL_LOCATION_NAME, + '--url', URL, + '--storage-credential-name', CREDENTIAL_NAME, + '--read-only', + '--comment', COMMENT, + '--skip-validation' + ]) + api_mock.create_external_location.assert_called_once_with(EXTERNAL_LOCATION, True) + echo_mock.assert_called_once_with(mc_pretty_format(EXTERNAL_LOCATION)) + + +@provide_conf +def test_create_location_cli_with_json(api_mock, echo_mock): + api_mock.create_external_location.return_value = EXTERNAL_LOCATION + runner = CliRunner() + runner.invoke( + ext_loc_cli.create_location_cli, + args=[ + '--json', '{ "name": "test_location_name" }' + ]) + api_mock.create_external_location.assert_called_once_with( + { + 'name': 'test_location_name' + }, + False) + echo_mock.assert_called_once_with(mc_pretty_format(EXTERNAL_LOCATION)) + + +@provide_conf +def test_list_locations_cli(api_mock, echo_mock): + api_mock.list_external_locations.return_value = EXTERNAL_LOCATIONS + runner = CliRunner() + runner.invoke(ext_loc_cli.list_locations_cli) + api_mock.list_external_locations.assert_called_once() + echo_mock.assert_called_once_with(mc_pretty_format(EXTERNAL_LOCATIONS)) + + +@provide_conf +def test_get_location_cli(api_mock, echo_mock): + api_mock.get_external_location.return_value = EXTERNAL_LOCATION + runner = CliRunner() + runner.invoke( + ext_loc_cli.get_location_cli, + args=['--name', EXTERNAL_LOCATION_NAME]) + api_mock.get_external_location.assert_called_once_with(EXTERNAL_LOCATION_NAME) + 
echo_mock.assert_called_once_with(mc_pretty_format(EXTERNAL_LOCATION)) + + +@provide_conf +def test_update_location_cli(api_mock, echo_mock): + api_mock.update_external_location.return_value = EXTERNAL_LOCATION + runner = CliRunner() + runner.invoke( + ext_loc_cli.update_location_cli, + args=[ + '--name', EXTERNAL_LOCATION_NAME, + '--new-name', 'new_location_name', + '--url', URL, + '--storage-credential-name', CREDENTIAL_NAME, + '--no-read-only', + '--comment', COMMENT, + '--owner', 'owner', + '--force', + '--skip-validation' + ]) + expected_data = { + 'name': 'new_location_name', + 'url': URL, + 'credential_name': CREDENTIAL_NAME, + 'read_only': False, + 'comment': COMMENT, + 'owner': 'owner' + } + api_mock.update_external_location.assert_called_once_with( + EXTERNAL_LOCATION_NAME, expected_data, True, True) + echo_mock.assert_called_once_with(mc_pretty_format(EXTERNAL_LOCATION)) + + +@provide_conf +def test_update_location_cli_null_read_only(api_mock, echo_mock): + api_mock.update_external_location.return_value = EXTERNAL_LOCATION + runner = CliRunner() + runner.invoke( + ext_loc_cli.update_location_cli, + args=[ + '--name', EXTERNAL_LOCATION_NAME, + '--new-name', 'new_location_name' + ]) + expected_data = { + 'name': 'new_location_name', + 'url': None, + 'credential_name': None, + 'read_only': None, + 'comment': None, + 'owner': None + } + api_mock.update_external_location.assert_called_once_with( + EXTERNAL_LOCATION_NAME, expected_data, False, False) + echo_mock.assert_called_once_with(mc_pretty_format(EXTERNAL_LOCATION)) + + +@provide_conf +def test_update_location_cli_with_json(api_mock, echo_mock): + api_mock.update_external_location.return_value = EXTERNAL_LOCATION + runner = CliRunner() + runner.invoke( + ext_loc_cli.update_location_cli, + args=[ + '--name', EXTERNAL_LOCATION_NAME, + '--json', '{ "name": "new_location_name" }' + ]) + api_mock.update_external_location.assert_called_once_with( + EXTERNAL_LOCATION_NAME, + { + 'name': 'new_location_name' + }, + False, + False) + echo_mock.assert_called_once_with(mc_pretty_format(EXTERNAL_LOCATION)) + + +@provide_conf +def test_delete_location_cli(api_mock): + runner = CliRunner() + runner.invoke( + ext_loc_cli.delete_location_cli, + args=[ + '--name', EXTERNAL_LOCATION_NAME, + '--force' + ]) + api_mock.delete_external_location.assert_called_once_with(EXTERNAL_LOCATION_NAME, True) + + +@provide_conf +def test_validate_location_cli(api_mock, echo_mock): + api_mock.validate_external_location.return_value = {} + runner = CliRunner() + runner.invoke( + ext_loc_cli.validate_location_cli, + args=[ + '--name', EXTERNAL_LOCATION_NAME, + '--url', URL, + '--cred-name', CREDENTIAL_NAME, + '--cred-aws-iam-role', 'aws-iam-role', + '--cred-az-directory-id', 'az-directory-id', + '--cred-az-application-id', 'az-application-id', + '--cred-az-client-secret', 'az-client-secret', + '--cred-az-mi-access-connector-id', 'az-mi-access-connector-id', + '--cred-az-mi-id', 'az-mi-id', + '--cred-gcp-sak-email', 'gcp-sak-email', + '--cred-gcp-sak-private-key-id', 'gcp-sak-private-key-id', + '--cred-gcp-sak-private-key', 'gcp-sak-private-key' + ]) + api_mock.validate_external_location.assert_called_once_with( + { + 'external_location_name': EXTERNAL_LOCATION_NAME, + 'url': URL, + 'storage_credential_name': CREDENTIAL_NAME, + 'aws_iam_role': { + 'role_arn': 'aws-iam-role' + }, + 'azure_service_principal': { + 'directory_id': 'az-directory-id', + 'application_id': 'az-application-id', + 'client_secret': 'az-client-secret', + }, + 'azure_managed_identity': { + 
'access_connector_id': 'az-mi-access-connector-id', + 'managed_identity_id': 'az-mi-id', + }, + 'gcp_service_account_key': { + 'email': 'gcp-sak-email', + 'private_key_id': 'gcp-sak-private-key-id', + 'private_key': 'gcp-sak-private-key' + } + }) + echo_mock.assert_called_once_with(mc_pretty_format({})) + \ No newline at end of file diff --git a/tests/unity_catalog/test_metastore_cli.py b/tests/unity_catalog/test_metastore_cli.py new file mode 100644 index 00000000..c3bef063 --- /dev/null +++ b/tests/unity_catalog/test_metastore_cli.py @@ -0,0 +1,219 @@ +# Databricks CLI +# Copyright 2017 Databricks, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"), except +# that the use of services to which certain application programming +# interfaces (each, an "API") connect requires that the user first obtain +# a license for the use of the APIs from Databricks, Inc. ("Databricks"), +# by creating an account at www.databricks.com and agreeing to either (a) +# the Community Edition Terms of Service, (b) the Databricks Terms of +# Service, or (c) another written agreement between Licensee and Databricks +# for the use of the APIs. +# +# You may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# pylint:disable=redefined-outer-name + +import mock +import pytest +from click.testing import CliRunner +from databricks_cli.unity_catalog.utils import mc_pretty_format + +from databricks_cli.unity_catalog import metastore_cli +from tests.utils import provide_conf + +METASTORE_NAME = 'test_metastore' +METASTORE_ID = 'test_metastore_id' +STORAGE_ROOT = 's3://some-root' +REGION = 'us-west-2' +METASTORES = { + 'metastores': [ + { + 'id': METASTORE_ID + } + ] +} +METASTORE = { + 'name': METASTORE_NAME, + 'storage_root': STORAGE_ROOT, + 'region': REGION +} + +WORKSPACE_ID = 12345 +DEFAULT_CATALOG_NAME = 'catalog_name' +METASTORE_ASSIGNMENT = { + 'workspace_id': WORKSPACE_ID, + 'metastore_id': METASTORE_ID, + 'default_catalog_name': DEFAULT_CATALOG_NAME +} + + +@pytest.fixture() +def api_mock(): + with mock.patch( + 'databricks_cli.unity_catalog.metastore_cli.UnityCatalogApi') as uc_api_mock: + _metastore_api_mock = mock.MagicMock() + uc_api_mock.return_value = _metastore_api_mock + yield _metastore_api_mock + + +@pytest.fixture() +def echo_mock(): + with mock.patch('databricks_cli.unity_catalog.metastore_cli.click.echo') as echo_mock: + yield echo_mock + + +@provide_conf +def test_create_metastore_cli(api_mock, echo_mock): + api_mock.create_metastore.return_value = METASTORE + runner = CliRunner() + runner.invoke( + metastore_cli.create_metastore_cli, + args=[ + '--name', METASTORE_NAME, + '--storage-root', STORAGE_ROOT, + '--region', REGION + ]) + api_mock.create_metastore.assert_called_once_with( + METASTORE_NAME, STORAGE_ROOT, REGION) + echo_mock.assert_called_once_with(mc_pretty_format(METASTORE)) + + +@provide_conf +def test_list_metastores_cli(api_mock, echo_mock): + api_mock.list_metastores.return_value = METASTORES + runner = CliRunner() + runner.invoke(metastore_cli.list_metastores_cli) + api_mock.list_metastores.assert_called_once() + 
echo_mock.assert_called_once_with(mc_pretty_format(METASTORES)) + + +@provide_conf +def test_get_metastore_cli(api_mock, echo_mock): + api_mock.get_metastore.return_value = METASTORE + runner = CliRunner() + runner.invoke( + metastore_cli.get_metastore_cli, + args=['--id', METASTORE_ID]) + api_mock.get_metastore.assert_called_once_with(METASTORE_ID) + echo_mock.assert_called_once_with(mc_pretty_format(METASTORE)) + + +@provide_conf +def test_update_metastore_cli(api_mock, echo_mock): + api_mock.update_metastore.return_value = METASTORE + runner = CliRunner() + runner.invoke( + metastore_cli.update_metastore_cli, + args=[ + '--id', METASTORE_ID, + '--new-name', 'new_metastore_name', + '--storage-root-credential-id', 'new_storage_root_credential_id', + '--delta-sharing-scope', 'INTERNAL_AND_EXTERNAL', + '--delta-sharing-recipient-token-lifetime-in-seconds', '123', + '--delta-sharing-organization-name', 'new_organization_name', + '--owner', 'owner' + ]) + expected_data = { + 'name': 'new_metastore_name', + 'storage_root_credential_id': 'new_storage_root_credential_id', + 'delta_sharing_scope': 'INTERNAL_AND_EXTERNAL', + 'delta_sharing_recipient_token_lifetime_in_seconds': 123, + 'delta_sharing_organization_name': 'new_organization_name', + 'owner': 'owner' + } + api_mock.update_metastore.assert_called_once_with(METASTORE_ID, expected_data) + echo_mock.assert_called_once_with(mc_pretty_format(METASTORE)) + + +@provide_conf +def test_update_metastore_cli_with_json(api_mock, echo_mock): + api_mock.update_metastore.return_value = METASTORE + runner = CliRunner() + runner.invoke( + metastore_cli.update_metastore_cli, + args=[ + '--id', METASTORE_ID, + '--json', '{ "name": "new_metastore_name" }' + ]) + api_mock.update_metastore.assert_called_once_with( + METASTORE_ID, + { + 'name': 'new_metastore_name' + }) + echo_mock.assert_called_once_with(mc_pretty_format(METASTORE)) + + +@provide_conf +def test_delete_metastore_cli(api_mock): + runner = CliRunner() + runner.invoke( + metastore_cli.delete_metastore_cli, + args=[ + '--id', METASTORE_ID, + '--force' + ]) + api_mock.delete_metastore.assert_called_once_with(METASTORE_ID, True) + + +@provide_conf +def test_metastore_summary_cli(api_mock, echo_mock): + api_mock.get_metastore_summary.return_value = METASTORE + runner = CliRunner() + runner.invoke( + metastore_cli.metastore_summary_cli, + args=[]) + api_mock.get_metastore_summary.assert_called_once_with() + echo_mock.assert_called_once_with(mc_pretty_format(METASTORE)) + + +@provide_conf +def test_get_metastore_assignment_cli(api_mock, echo_mock): + api_mock.get_current_metastore_assignment.return_value = METASTORE_ASSIGNMENT + runner = CliRunner() + runner.invoke( + metastore_cli.get_metastore_assignment_cli, + args=[]) + api_mock.get_current_metastore_assignment.assert_called_once_with() + echo_mock.assert_called_once_with(mc_pretty_format(METASTORE_ASSIGNMENT)) + + +@provide_conf +def test_assign_metastore_assignment_cli(api_mock, echo_mock): + api_mock.create_metastore_assignment.return_value = {} + runner = CliRunner() + runner.invoke( + metastore_cli.assign_metastore_cli, + args=[ + '--workspace-id', WORKSPACE_ID, + '--metastore-id', METASTORE_ID, + '--default-catalog-name', DEFAULT_CATALOG_NAME + ]) + api_mock.create_metastore_assignment.assert_called_once_with( + WORKSPACE_ID, METASTORE_ID, DEFAULT_CATALOG_NAME + ) + echo_mock.assert_called_once_with(mc_pretty_format({})) + + +@provide_conf +def test_unassign_metastore_assignment_cli(api_mock, echo_mock): + 
api_mock.delete_metastore_assignment.return_value = {} + runner = CliRunner() + runner.invoke( + metastore_cli.unassign_metastore_cli, + args=[ + '--workspace-id', WORKSPACE_ID, + '--metastore-id', METASTORE_ID + ]) + api_mock.delete_metastore_assignment.assert_called_once_with(WORKSPACE_ID, METASTORE_ID) + echo_mock.assert_called_once_with(mc_pretty_format({})) + \ No newline at end of file
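
For reference, here is a minimal sketch of how the fill_credential helper added in cred_cli.py maps flag values onto the nested credential payload that create_credential_cli and update_credential_cli send to the API. The credential name, comment, and role ARN below are invented for illustration, and the sketch assumes a Python environment where databricks-cli with this patch applied is importable.

from databricks_cli.unity_catalog.cred_cli import fill_credential

# Flag values arrive from click as keyword arguments; flags the user did not set are None.
data = {'name': 'example-credential', 'comment': 'example comment'}  # hypothetical values
fill_credential(
    data,
    aws_iam_role_arn='arn:aws:iam::123456789012:role/example-role',  # hypothetical ARN
    az_sp_directory_id=None, az_sp_application_id=None, az_sp_client_secret=None,
    az_mi_access_connector_id=None, az_mi_id=None,
    gcp_sak_email=None, gcp_sak_private_key_id=None, gcp_sak_private_key=None)
# fill_credential mutates data in place; only the credential types whose flags were
# supplied are added, so data now also contains an 'aws_iam_role' block:
# {'name': ..., 'comment': ..., 'aws_iam_role': {'role_arn': 'arn:aws:iam::...'}}
print(data)

Both the create and update commands route their flag values through this helper before falling back to the raw --json / --json-file path, which keeps the flag-to-payload mapping in one place.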