Skip to content

Commit

Permalink
Merge pull request #320 from dbt-labs/er/support-tox
Browse files Browse the repository at this point in the history
Add tox support for testing redshift, bigquery and snowflake
  • Loading branch information
dataders authored Nov 1, 2024
2 parents 241cd44 + 495c454 commit 4d8f4c0
Show file tree
Hide file tree
Showing 11 changed files with 203 additions and 75 deletions.
43 changes: 43 additions & 0 deletions .github/workflows/ci.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,43 @@
# **what?**
# Run tests for dbt-external-tables against supported adapters

# **why?**
# To ensure that dbt-external-tables works as expected with all supported adapters

# **when?**
# On every PR, on every push to main, and when manually triggered

name: Package Integration Tests

on:
  push:
    branches:
      - main
  pull_request:
  workflow_dispatch:

jobs:
  run-tests:
    # Delegates to the shared dbt-labs package-testing workflow, which runs tox
    # against each adapter listed in supported_adapters.env.
    uses: dbt-labs/dbt-package-testing/.github/workflows/run_tox.yml@v1
    # Non-secret connection settings come from repository variables; schemas are
    # suffixed with the run number so concurrent runs don't collide.
    with:
      # redshift
      REDSHIFT_HOST: ${{ vars.REDSHIFT_HOST }}
      REDSHIFT_USER: ${{ vars.REDSHIFT_USER }}
      REDSHIFT_PORT: ${{ vars.REDSHIFT_PORT }}
      REDSHIFT_DATABASE: ${{ vars.REDSHIFT_DATABASE }}
      REDSHIFT_SCHEMA: "integration_tests_redshift_${{ github.run_number }}"
      # snowflake
      SNOWFLAKE_USER: ${{ vars.SNOWFLAKE_USER }}
      SNOWFLAKE_WAREHOUSE: ${{ vars.SNOWFLAKE_WAREHOUSE }}
      SNOWFLAKE_ROLE: ${{ vars.SNOWFLAKE_ROLE }}
      SNOWFLAKE_DATABASE: ${{ vars.SNOWFLAKE_DATABASE }}
      SNOWFLAKE_SCHEMA: "integration_tests_snowflake_${{ github.run_number }}"
      # bigquery
      BIGQUERY_PROJECT: ${{ vars.BIGQUERY_PROJECT }}
      BIGQUERY_SCHEMA: "integration_tests_bigquery_${{ github.run_number }}"

    # Credentials are passed separately as secrets so they are masked in logs.
    secrets:
      DBT_ENV_SECRET_REDSHIFT_PASS: ${{ secrets.DBT_ENV_SECRET_REDSHIFT_PASS }}
      SNOWFLAKE_ACCOUNT: ${{ secrets.SNOWFLAKE_ACCOUNT }}
      DBT_ENV_SECRET_SNOWFLAKE_PASS: ${{ secrets.DBT_ENV_SECRET_SNOWFLAKE_PASS }}
      BIGQUERY_KEYFILE_JSON: ${{ secrets.BIGQUERY_KEYFILE_JSON }}
43 changes: 23 additions & 20 deletions .github/workflows/integration_tests.yml
Original file line number Diff line number Diff line change
Expand Up @@ -10,8 +10,6 @@ jobs:
build:

runs-on: ubuntu-latest
environment:
name: ci_testing
strategy:
fail-fast: true
max-parallel: 3
Expand Down Expand Up @@ -40,22 +38,27 @@ jobs:
dbt run-operation prep_external --target ${{ matrix.data-platform }}
dbt -d run-operation dbt_external_tables.stage_external_sources --vars 'ext_full_refresh: true' --target ${{ matrix.data-platform }}
dbt run-operation dbt_external_tables.stage_external_sources --target ${{ matrix.data-platform }}
dbt -d test --target ${{ matrix.data-platform }}
env:
REDSHIFT_TEST_HOST: ${{ secrets.REDSHIFT_TEST_HOST }}
REDSHIFT_TEST_USER: ${{ secrets.REDSHIFT_TEST_USER }}
REDSHIFT_TEST_PASS: ${{ secrets.REDSHIFT_TEST_PASS }}
REDSHIFT_TEST_PORT: ${{ secrets.REDSHIFT_TEST_PORT }}
REDSHIFT_TEST_DBNAME: ${{ secrets.REDSHIFT_TEST_DBNAME }}
dbt test --target ${{ matrix.data-platform }}
env:

env:
# redshift
REDSHIFT_HOST: ${{ secrets.REDSHIFT_TEST_HOST }}
REDSHIFT_USER: ${{ secrets.REDSHIFT_TEST_USER }}
DBT_ENV_SECRET_REDSHIFT_PASS: ${{ secrets.REDSHIFT_TEST_PASS }}
REDSHIFT_PORT: ${{ secrets.REDSHIFT_TEST_PORT }}
REDSHIFT_DATABASE: ${{ secrets.REDSHIFT_TEST_DBNAME }}
REDSHIFT_SPECTRUM_IAM_ROLE: ${{ secrets.REDSHIFT_SPECTRUM_IAM_ROLE }}
SNOWFLAKE_TEST_ACCOUNT: ${{ secrets.SNOWFLAKE_TEST_ACCOUNT }}
SNOWFLAKE_TEST_USER: ${{ secrets.SNOWFLAKE_TEST_USER }}
SNOWFLAKE_TEST_PASS: ${{ secrets.SNOWFLAKE_TEST_PASS }}
SNOWFLAKE_TEST_WHNAME: ${{ secrets.SNOWFLAKE_TEST_WHNAME }}
SNOWFLAKE_TEST_ROLE: ${{ secrets.SNOWFLAKE_TEST_ROLE }}
SNOWFLAKE_TEST_DBNAME: ${{ secrets.SNOWFLAKE_TEST_DBNAME }}
BIGQUERY_TEST_PROJECT: ${{ secrets.BIGQUERY_TEST_PROJECT }}
BIGQUERY_PRIVATE_KEY: ${{ secrets.BIGQUERY_PRIVATE_KEY }}
BIGQUERY_PRIVATE_KEY_ID: ${{ secrets.BIGQUERY_PRIVATE_KEY_ID }}
BIGQUERY_CLIENT_EMAIL: ${{ secrets.BIGQUERY_CLIENT_EMAIL }}
BIGQUERY_CLIENT_ID: ${{ secrets.BIGQUERY_CLIENT_ID }}
REDSHIFT_SCHEMA: "dbt_external_tables_integration_tests_redshift"
# snowflake
SNOWFLAKE_ACCOUNT: ${{ secrets.SNOWFLAKE_TEST_ACCOUNT }}
SNOWFLAKE_USER: ${{ secrets.SNOWFLAKE_TEST_USER }}
DBT_ENV_SECRET_SNOWFLAKE_PASS: ${{ secrets.SNOWFLAKE_TEST_PASS }}
SNOWFLAKE_WAREHOUSE: ${{ secrets.SNOWFLAKE_TEST_WHNAME }}
SNOWFLAKE_ROLE: ${{ secrets.SNOWFLAKE_TEST_ROLE }}
SNOWFLAKE_DATABASE: ${{ secrets.SNOWFLAKE_TEST_DBNAME }}
SNOWFLAKE_SCHEMA: "dbt_external_tables_integration_tests_snowflake"
# bigquery
BIGQUERY_PROJECT: ${{ vars.BIGQUERY_PROJECT }}
BIGQUERY_SCHEMA: "dbt_external_tables_integration_tests_bigquery"
BIGQUERY_KEYFILE_JSON: ${{ secrets.BIGQUERY_KEYFILE_JSON }}
1 change: 1 addition & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -6,3 +6,4 @@
**/env/
**/venv/
**/test.env
integration_tests/vars.env
4 changes: 4 additions & 0 deletions integration_tests/dbt_project.yml
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,10 @@ clean-targets:
- "target"
- "dbt_packages"

flags:
send_anonymous_usage_stats: False
use_colors: True

dispatch:
- macro_namespace: dbt_external_tables
search_order: ['dbt_external_tables_integration_tests', 'dbt_external_tables']
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@
{{ external_schema }}
from data catalog
database '{{ external_schema }}'
iam_role '{{ env_var("REDSHIFT_SPECTRUM_IAM_ROLE") }}'
iam_role 'arn:aws:iam::859831564954:role/RedshiftSpectrumTesting'
create external database if not exists;

{% endset %}
Expand Down
1 change: 0 additions & 1 deletion integration_tests/packages.yml
Original file line number Diff line number Diff line change
@@ -1,4 +1,3 @@

packages:
- local: ../
- package: dbt-labs/dbt_utils
Expand Down
60 changes: 24 additions & 36 deletions integration_tests/profiles.yml
Original file line number Diff line number Diff line change
Expand Up @@ -2,52 +2,40 @@
# HEY! This file is used in the dbt-external-tables integration tests with GitHub Actions.
# You should __NEVER__ check credentials into version control. Thanks for reading :)

config:
send_anonymous_usage_stats: False
use_colors: True

integration_tests:
target: postgres
outputs:

redshift:
type: redshift
host: "{{ env_var('REDSHIFT_TEST_HOST') }}"
user: "{{ env_var('REDSHIFT_TEST_USER') }}"
pass: "{{ env_var('REDSHIFT_TEST_PASS') }}"
dbname: "{{ env_var('REDSHIFT_TEST_DBNAME') }}"
port: "{{ env_var('REDSHIFT_TEST_PORT') | as_number }}"
schema: dbt_external_tables_integration_tests_redshift
threads: 1
type: "redshift"
host: "{{ env_var('REDSHIFT_HOST') }}"
user: "{{ env_var('REDSHIFT_USER') }}"
pass: "{{ env_var('DBT_ENV_SECRET_REDSHIFT_PASS') }}"
dbname: "{{ env_var('REDSHIFT_DATABASE') }}"
port: "{{ env_var('REDSHIFT_PORT') | as_number }}"
schema: "{{ env_var('REDSHIFT_SCHEMA') }}"
threads: 5

snowflake:
type: snowflake
account: "{{ env_var('SNOWFLAKE_TEST_ACCOUNT') }}"
user: "{{ env_var('SNOWFLAKE_TEST_USER') }}"
password: "{{ env_var('SNOWFLAKE_TEST_PASS') }}"
role: "{{ env_var('SNOWFLAKE_TEST_ROLE') }}"
database: "{{ env_var('SNOWFLAKE_TEST_DBNAME') }}"
warehouse: "{{ env_var('SNOWFLAKE_TEST_WHNAME') }}"
schema: dbt_external_tables_integration_tests_snowflake
threads: 1
type: "snowflake"
account: "{{ env_var('SNOWFLAKE_ACCOUNT') }}"
user: "{{ env_var('SNOWFLAKE_USER') }}"
password: "{{ env_var('DBT_ENV_SECRET_SNOWFLAKE_PASS') }}"
role: "{{ env_var('SNOWFLAKE_ROLE') }}"
database: "{{ env_var('SNOWFLAKE_DATABASE') }}"
warehouse: "{{ env_var('SNOWFLAKE_WAREHOUSE') }}"
schema: "{{ env_var('SNOWFLAKE_SCHEMA') }}"
threads: 10

bigquery:
type: bigquery
method: service-account-json
type: "bigquery"
method: "service-account-json"
project: "{{ env_var('BIGQUERY_PROJECT') }}"
dataset: "{{ env_var('BIGQUERY_SCHEMA') }}"
threads: 10
keyfile_json:
type: "service_account"
project_id: "{{ env_var('BIGQUERY_TEST_PROJECT') }}"
private_key: "{{ env_var('BIGQUERY_PRIVATE_KEY') }}"
private_key_id: "{{ env_var('BIGQUERY_PRIVATE_KEY_ID') }}"
client_email: "{{ env_var('BIGQUERY_CLIENT_EMAIL') }}"
client_id: "{{ env_var('BIGQUERY_CLIENT_ID') }}"
auth_uri: "https://accounts.google.com/o/oauth2/auth"
token_uri: "https://oauth2.googleapis.com/token"
auth_provider_x509_cert_url: "https://www.googleapis.com/oauth2/v1/certs"
client_x509_cert_url: https://www.googleapis.com/robot/v1/metadata/x509/{{ env_var('BIGQUERY_CLIENT_EMAIL') | urlencode }}"
project: "{{ env_var('BIGQUERY_TEST_PROJECT') }}"
schema: dbt_external_tables_integration_tests_bigquery
threads: 1
"{{ env_var('BIGQUERY_KEYFILE_JSON') | as_native}}"
job_retries: 3

databricks:
type: spark
Expand Down
34 changes: 17 additions & 17 deletions integration_tests/test.env.sample
Original file line number Diff line number Diff line change
@@ -1,28 +1,28 @@
# gh secret set -f integration_tests/test.env -e ci_testing

# redshift
REDSHIFT_TEST_HOST=
REDSHIFT_TEST_USER=
REDSHIFT_TEST_PASS=
REDSHIFT_TEST_DBNAME=
REDSHIFT_TEST_PORT=
REDSHIFT_HOST=
REDSHIFT_USER=
DBT_ENV_SECRET_REDSHIFT_PASS=
REDSHIFT_PORT=
REDSHIFT_DATABASE=
REDSHIFT_SCHEMA=
REDSHIFT_SPECTRUM_IAM_ROLE=

# snowflake

SNOWFLAKE_TEST_ACCOUNT=
SNOWFLAKE_TEST_USER=
SNOWFLAKE_TEST_PASS=
SNOWFLAKE_TEST_ROLE=
SNOWFLAKE_TEST_DBNAME=
SNOWFLAKE_TEST_WHNAME=
SNOWFLAKE_ACCOUNT=
SNOWFLAKE_USER=
DBT_ENV_SECRET_SNOWFLAKE_PASS=
SNOWFLAKE_ROLE=
SNOWFLAKE_DATABASE=
SNOWFLAKE_SCHEMA=
SNOWFLAKE_WAREHOUSE=

# bigquery
BIGQUERY_PRIVATE_KEY=
BIGQUERY_PRIVATE_KEY_ID=
BIGQUERY_CLIENT_EMAIL=
BIGQUERY_CLIENT_ID=
BIGQUERY_TEST_PROJECT=
BIGQUERY_PROJECT=
BIGQUERY_SCHEMA=
BIGQUERY_KEYFILE_JSON=


# databricks
DATABRICKS_TEST_HOST=
Expand Down
18 changes: 18 additions & 0 deletions integration_tests/vars.env.sample
Original file line number Diff line number Diff line change
@@ -0,0 +1,18 @@
# gh variable set -f integration_tests/vars.env

# redshift
REDSHIFT_HOST=
REDSHIFT_USER=
REDSHIFT_DATABASE=
REDSHIFT_PORT=
REDSHIFT_SPECTRUM_IAM_ROLE=

# snowflake
SNOWFLAKE_ACCOUNT=
SNOWFLAKE_USER=
SNOWFLAKE_ROLE=
SNOWFLAKE_DATABASE=
SNOWFLAKE_WAREHOUSE=

# bigquery
BIGQUERY_PROJECT=
1 change: 1 addition & 0 deletions supported_adapters.env
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
SUPPORTED_ADAPTERS=snowflake,redshift,bigquery
71 changes: 71 additions & 0 deletions tox.ini
Original file line number Diff line number Diff line change
@@ -0,0 +1,71 @@
[tox]
skipsdist = True
envlist = lint_all, testenv

# Environment variables forwarded from the CI environment into each test env.
[testenv]
passenv =
    # redshift
    REDSHIFT_HOST
    REDSHIFT_USER
    DBT_ENV_SECRET_REDSHIFT_PASS
    REDSHIFT_DATABASE
    REDSHIFT_SCHEMA
    REDSHIFT_PORT
    REDSHIFT_SPECTRUM_IAM_ROLE
    # snowflake
    SNOWFLAKE_ACCOUNT
    SNOWFLAKE_USER
    DBT_ENV_SECRET_SNOWFLAKE_PASS
    SNOWFLAKE_ROLE
    SNOWFLAKE_DATABASE
    SNOWFLAKE_WAREHOUSE
    SNOWFLAKE_SCHEMA
    # bigquery
    BIGQUERY_KEYFILE_JSON
    BIGQUERY_PROJECT
    BIGQUERY_SCHEMA

# run dbt commands directly, assumes dbt is already installed in environment
[testenv:dbt_integration_redshift]
changedir = integration_tests
allowlist_externals =
    dbt
skip_install = true
commands =
    dbt deps --target redshift
    dbt seed --full-refresh --target redshift
    dbt run --target redshift
    dbt run-operation prep_external --target redshift
    dbt run-operation dbt_external_tables.stage_external_sources --vars 'ext_full_refresh: true' --target redshift
    dbt run-operation dbt_external_tables.stage_external_sources --target redshift
    dbt test --target redshift

# run dbt commands directly, assumes dbt is already installed in environment
[testenv:dbt_integration_snowflake]
changedir = integration_tests
allowlist_externals =
    dbt
skip_install = true
commands =
    dbt deps --target snowflake
    dbt seed --full-refresh --target snowflake
    dbt run --target snowflake
    dbt run-operation prep_external --target snowflake
    dbt run-operation dbt_external_tables.stage_external_sources --vars 'ext_full_refresh: true' --target snowflake
    dbt run-operation dbt_external_tables.stage_external_sources --target snowflake
    dbt test --target snowflake

# run dbt commands directly, assumes dbt is already installed in environment
[testenv:dbt_integration_bigquery]
changedir = integration_tests
allowlist_externals =
    dbt
skip_install = true
commands =
    dbt deps --target bigquery
    dbt seed --full-refresh --target bigquery
    dbt run --target bigquery
    dbt run-operation prep_external --target bigquery
    dbt run-operation dbt_external_tables.stage_external_sources --vars 'ext_full_refresh: true' --target bigquery
    dbt run-operation dbt_external_tables.stage_external_sources --target bigquery
    dbt test --target bigquery

0 comments on commit 4d8f4c0

Please sign in to comment.