Add SQL Endpoint specific integration tests. #45

Merged: 1 commit, Feb 3, 2022
23 changes: 22 additions & 1 deletion tests/integration/base.py
@@ -85,7 +85,7 @@ def __init__(self, kwargs):


def _profile_from_test_name(test_name):
-    adapter_names = 'databricks_sql_connector',
+    adapter_names = ('databricks_sql_connector', 'databricks_sql_endpoint_connector')
    adapters_in_name = sum(x in test_name for x in adapter_names)
    if adapters_in_name != 1:
        raise ValueError(
@@ -162,6 +162,25 @@ def databricks_sql_connector_profile(self):
}
}

def databricks_sql_endpoint_connector_profile(self):
return {
'config': {
'send_anonymous_usage_stats': False
},
'test': {
'outputs': {
'endpoint': {
'type': 'databricks',
'host': os.getenv('DBT_DATABRICKS_HOST_NAME'),
'http_path': os.getenv('DBT_DATABRICKS_ENDPOINT_HTTP_PATH'),
'token': os.getenv('DBT_DATABRICKS_TOKEN'),
'schema': self.unique_schema()
},
},
'target': 'endpoint'
}
}

@property
def packages_config(self):
return None
@@ -189,6 +208,8 @@ def alternative_database(self):
def get_profile(self, adapter_type):
if adapter_type == 'databricks_sql_connector':
return self.databricks_sql_connector_profile()
elif adapter_type == 'databricks_sql_endpoint_connector':
return self.databricks_sql_endpoint_connector_profile()
else:
raise ValueError('invalid adapter type {}'.format(adapter_type))

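Taken together, these hooks dispatch on an adapter name embedded in the test method's name: the decorator picks the profile, and _profile_from_test_name verifies that exactly one known adapter name appears in the name. A minimal sketch of a test opting into the new endpoint profile (the DBTIntegrationTest base class and run_dbt helper are assumptions based on dbt's legacy integration-test framework; use_profile itself is confirmed by the review comment further down):

    from tests.integration.base import DBTIntegrationTest, use_profile

    class TestEndpointSmoke(DBTIntegrationTest):
        @use_profile('databricks_sql_endpoint_connector')
        def test_run_databricks_sql_endpoint_connector(self):
            # The profile name must appear in the method name exactly once;
            # otherwise _profile_from_test_name raises ValueError.
            self.run_dbt(['run'])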
3 changes: 3 additions & 0 deletions tests/integration/conftest.py
@@ -2,3 +2,6 @@ def pytest_configure(config):
config.addinivalue_line(
"markers", "profile_databricks_sql_connector"
)
config.addinivalue_line(
"markers", "profile_databricks_sql_endpoint_connector"
)
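Registering the marker keeps pytest from warning about unknown marks and enables selecting endpoint tests with -m profile_databricks_sql_endpoint_connector, mirroring what the tox command below does for the cluster profile. A hedged sketch of how use_profile presumably attaches the mark (the real decorator in tests/integration/base.py likely does more, such as skipping tests for unselected profiles):

    import pytest

    def use_profile(profile_name):
        # Map 'databricks_sql_endpoint_connector' to the registered mark
        # profile_databricks_sql_endpoint_connector and apply it to the test.
        def decorator(func):
            mark = getattr(pytest.mark, 'profile_{}'.format(profile_name))
            return mark(func)
        return decorator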
32 changes: 32 additions & 0 deletions tests/specs/spark-databricks-sql-endpoint-connector.dbtspec
@@ -0,0 +1,32 @@
target:
type: databricks
host: "{{ env_var('DBT_DATABRICKS_HOST_NAME') }}"
http_path: "{{ env_var('DBT_DATABRICKS_ENDPOINT_HTTP_PATH') }}"
token: "{{ env_var('DBT_DATABRICKS_TOKEN') }}"
port: 443
schema: "analytics_{{ var('_dbt_random_suffix') }}"
connect_retries: 5
connect_timeout: 60
projects:
- overrides: snapshot_strategy_check_cols
dbt_project_yml: &file_format_delta
# we're going to UPDATE the seed tables as part of testing, so we must make them delta format
seeds:
dbt_test_project:
file_format: delta
snapshots:
dbt_test_project:
file_format: delta
- overrides: snapshot_strategy_timestamp
dbt_project_yml: *file_format_delta
sequences:
test_dbt_empty: empty
# Skip for now as not all `SET` commands work on endpoints
# test_dbt_base: base
test_dbt_ephemeral: ephemeral
test_dbt_incremental: incremental
test_dbt_snapshot_strategy_timestamp: snapshot_strategy_timestamp
test_dbt_snapshot_strategy_check_cols: snapshot_strategy_check_cols
test_dbt_data_test: data_test
test_dbt_ephemeral_data_tests: data_test_ephemeral_models
test_dbt_schema_test: schema_test
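Usage note: .dbtspec files are executed by the pytest-dbt-adapter plugin (presumably pulled in via dev_requirements.txt), so this spec runs under plain pytest; the tox environment below invokes it exactly that way. The &file_format_delta anchor lets the snapshot_strategy_timestamp override reuse the same delta-format project config rather than duplicating it.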
10 changes: 10 additions & 0 deletions tox.ini
@@ -27,3 +27,13 @@ deps =
-r{toxinidir}/requirements.txt
-r{toxinidir}/dev_requirements.txt
-e.

[testenv:integration-spark-databricks-sql-endpoint-connector]
basepython = python3.8
commands = /bin/bash -c '{envpython} -m pytest -v tests/specs/spark-databricks-sql-endpoint-connector.dbtspec'
/bin/bash -c '{envpython} -m pytest -v -m profile_databricks_sql_connector {posargs} -n4 tests/integration/*'
Review comment from @ueshin (Collaborator), Feb 3, 2022:
The profile name should be profile_databricks_sql_endpoint_connector, and we need to enable the custom integration tests for the profile, e.g., in test_incremental_strategies.py:

    @use_profile("databricks_sql_endpoint_connector")
    def test_default_append_databricks_sql_endpoint_connector(self):
        self.run_and_test()

passenv = DBT_* PYTEST_ADDOPTS
deps =
-r{toxinidir}/requirements.txt
-r{toxinidir}/dev_requirements.txt
-e.
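With this environment in place, the endpoint suite runs locally via tox -e integration-spark-databricks-sql-endpoint-connector, provided the DBT_DATABRICKS_* variables are exported (passenv forwards DBT_* and PYTEST_ADDOPTS). Note that, as merged, the second command still filters on -m profile_databricks_sql_connector; per the review comment above, the endpoint marker would be profile_databricks_sql_endpoint_connector.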