From 52111b5917be7ee4d928d3d46bebed09cdc98d07 Mon Sep 17 00:00:00 2001
From: Shant Hovsepian
Date: Wed, 2 Feb 2022 23:27:56 -0500
Subject: [PATCH] Add SQL Endpoint specific integration tests.

Adds a new test profile and env variable DBT_DATABRICKS_ENDPOINT_HTTP_PATH
---
 tests/integration/base.py                     | 23 +++++++++++++++++++++++-
 tests/integration/conftest.py                 |  3 ++
 ...-databricks-sql-endpoint-connector.dbtspec | 32 +++++++++++++++++++
 tox.ini                                       | 10 ++++++
 4 files changed, 67 insertions(+), 1 deletion(-)
 create mode 100644 tests/specs/spark-databricks-sql-endpoint-connector.dbtspec

diff --git a/tests/integration/base.py b/tests/integration/base.py
index e94e048e1..aab6675f7 100644
--- a/tests/integration/base.py
+++ b/tests/integration/base.py
@@ -85,7 +85,7 @@ def __init__(self, kwargs):
 
 
 def _profile_from_test_name(test_name):
-    adapter_names = 'databricks_sql_connector',
+    adapter_names = ('databricks_sql_connector', 'databricks_sql_endpoint_connector')
     adapters_in_name = sum(x in test_name for x in adapter_names)
     if adapters_in_name != 1:
         raise ValueError(
@@ -162,6 +162,25 @@ def databricks_sql_connector_profile(self):
             }
         }
 
+    def databricks_sql_endpoint_connector_profile(self):
+        return {
+            'config': {
+                'send_anonymous_usage_stats': False
+            },
+            'test': {
+                'outputs': {
+                    'endpoint': {
+                        'type': 'databricks',
+                        'host': os.getenv('DBT_DATABRICKS_HOST_NAME'),
+                        'http_path': os.getenv('DBT_DATABRICKS_ENDPOINT_HTTP_PATH'),
+                        'token': os.getenv('DBT_DATABRICKS_TOKEN'),
+                        'schema': self.unique_schema()
+                    },
+                },
+                'target': 'endpoint'
+            }
+        }
+
     @property
     def packages_config(self):
         return None
@@ -189,6 +208,8 @@ def alternative_database(self):
 
     def get_profile(self, adapter_type):
         if adapter_type == 'databricks_sql_connector':
             return self.databricks_sql_connector_profile()
+        elif adapter_type == 'databricks_sql_endpoint_connector':
+            return self.databricks_sql_endpoint_connector_profile()
         else:
             raise ValueError('invalid adapter type {}'.format(adapter_type))
diff --git a/tests/integration/conftest.py b/tests/integration/conftest.py
index dbc7e7933..5bdc2c4dd 100644
--- a/tests/integration/conftest.py
+++ b/tests/integration/conftest.py
@@ -2,3 +2,6 @@ def pytest_configure(config):
     config.addinivalue_line(
         "markers", "profile_databricks_sql_connector"
     )
+    config.addinivalue_line(
+        "markers", "profile_databricks_sql_endpoint_connector"
+    )
diff --git a/tests/specs/spark-databricks-sql-endpoint-connector.dbtspec b/tests/specs/spark-databricks-sql-endpoint-connector.dbtspec
new file mode 100644
index 000000000..b910872a2
--- /dev/null
+++ b/tests/specs/spark-databricks-sql-endpoint-connector.dbtspec
@@ -0,0 +1,32 @@
+target:
+  type: databricks
+  host: "{{ env_var('DBT_DATABRICKS_HOST_NAME') }}"
+  http_path: "{{ env_var('DBT_DATABRICKS_ENDPOINT_HTTP_PATH') }}"
+  token: "{{ env_var('DBT_DATABRICKS_TOKEN') }}"
+  port: 443
+  schema: "analytics_{{ var('_dbt_random_suffix') }}"
+  connect_retries: 5
+  connect_timeout: 60
+projects:
+  - overrides: snapshot_strategy_check_cols
+    dbt_project_yml: &file_format_delta
+      # we're going to UPDATE the seed tables as part of testing, so we must make them delta format
+      seeds:
+        dbt_test_project:
+          file_format: delta
+      snapshots:
+        dbt_test_project:
+          file_format: delta
+  - overrides: snapshot_strategy_timestamp
+    dbt_project_yml: *file_format_delta
+sequences:
+  test_dbt_empty: empty
+  # Skip for now as not all `SET` commands work on endpoints
+  # test_dbt_base: base
+  test_dbt_ephemeral: ephemeral
+  test_dbt_incremental: incremental
+  test_dbt_snapshot_strategy_timestamp: snapshot_strategy_timestamp
+  test_dbt_snapshot_strategy_check_cols: snapshot_strategy_check_cols
+  test_dbt_data_test: data_test
+  test_dbt_ephemeral_data_tests: data_test_ephemeral_models
+  test_dbt_schema_test: schema_test
diff --git a/tox.ini b/tox.ini
index bce3a02de..0e8ece658 100644
--- a/tox.ini
+++ b/tox.ini
@@ -27,3 +27,13 @@ deps =
     -r{toxinidir}/requirements.txt
     -r{toxinidir}/dev_requirements.txt
    -e.
+
+[testenv:integration-spark-databricks-sql-endpoint-connector]
+basepython = python3.8
+commands = /bin/bash -c '{envpython} -m pytest -v tests/specs/spark-databricks-sql-endpoint-connector.dbtspec'
+           /bin/bash -c '{envpython} -m pytest -v -m profile_databricks_sql_endpoint_connector {posargs} -n4 tests/integration/*'
+passenv = DBT_* PYTEST_ADDOPTS
+deps =
+    -r{toxinidir}/requirements.txt
+    -r{toxinidir}/dev_requirements.txt
+    -e.
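
A minimal sketch of how the new environment can be exercised locally, assuming
tox is installed and the angle-bracketed values are replaced with real
workspace details (the variable names and the testenv name come from the patch
above; the values themselves are placeholders):

    # Connection details for the target workspace (placeholder values).
    export DBT_DATABRICKS_HOST_NAME='<workspace-hostname>'
    export DBT_DATABRICKS_ENDPOINT_HTTP_PATH='<sql-endpoint-http-path>'
    export DBT_DATABRICKS_TOKEN='<personal-access-token>'

    # Run the testenv added to tox.ini; it executes the dbtspec suite first,
    # then the marker-filtered pytest integration tests.
    tox -e integration-spark-databricks-sql-endpoint-connector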