Add tox (#919)
* update env var, add tox support for running dbt commands

* add very basic pytest that doesn't touch dbt

* basic cleanup

* add supported adapter list

* tweak the tox adapters list

* tweak the tox adapters list

* add comments, fix some typos

* add back tests

* put back pass

* add more to ignore

* move config

* fix env var name

* consolidate renaming

* add redshift/bq

* add more WH, tweak vars

* fix profile key

* Update tox.ini

* add GitHub workflow, move supported adapters

* start postgres

* tweaks

* change keyfile

* change how postgres is handled

* add other vars

* test redshift

* make schema unique per run

* updates to get BQ working

* cleanup tox file

* move and rename profiles.yml

* update testing to reflect new profiles

* update readme to reflect changes to profile

* add newlines

* use a different schema name

* Trigger CircleCI

* rename postgres vars

* Trigger CircleCI

* Setup environment variables for dbt-bigquery in CircleCI

* Update CircleCI to use the same profiles target as GitHub Actions

* Add whitespace

* Restore original profiles.yml file ahead of rename

* Move and rename profiles.yml

* Update profiles.yml to use new environment variables

---------

Co-authored-by: Doug Beatty <doug.beatty@dbtlabs.com>
emmyoop and dbeatty10 authored Aug 27, 2024
1 parent 9237ba9 commit 4feda1c
Showing 17 changed files with 322 additions and 184 deletions.
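
Taken together, these changes swap the `run_test.sh`-driven workflow for per-adapter tox environments. A rough local sketch of the new flow (the `dbt_integration_<adapter>` environment name follows the convention used by the Makefile and CI workflow below; everything else is illustrative):

```shell
# Illustrative local run against Postgres via the new tox environments.
# Assumes the connection variables from integration_tests/.env/postgres.env
# are already exported and a matching Postgres instance is reachable.
python -m pip install --upgrade pip
pip install tox dbt-postgres

# Run the per-adapter environment, mirroring the CI and Makefile invocations.
tox -e dbt_integration_postgres
```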
18 changes: 10 additions & 8 deletions .circleci/config.yml
@@ -10,11 +10,12 @@ jobs:
environment:
POSTGRES_USER: root
environment:
-POSTGRES_TEST_HOST: localhost
-POSTGRES_TEST_USER: root
-POSTGRES_TEST_PASS: ''
-POSTGRES_TEST_PORT: 5432
-POSTGRES_TEST_DBNAME: circle_test
+POSTGRES_HOST: localhost
+POSTGRES_USER: root
+DBT_ENV_SECRET_POSTGRES_PASS: ''
+POSTGRES_PORT: 5432
+POSTGRES_DATABASE: circle_test
+POSTGRES_SCHEMA: dbt_utils_integration_tests_postgres

steps:
- checkout
@@ -71,8 +72,8 @@ jobs:
- checkout
- run: pip install --pre dbt-bigquery -r dev-requirements.txt
- run:
name: "Set up credentials"
command: echo $BIGQUERY_SERVICE_ACCOUNT_JSON > ${HOME}/bigquery-service-key.json
name: Setup Environment Variables
command: echo 'export BIGQUERY_KEYFILE_JSON="$BIGQUERY_SERVICE_ACCOUNT_JSON"' >> "$BASH_ENV"
- run:
name: "Run OG Tests - BigQuery"
command: ./run_test.sh bigquery
@@ -87,7 +88,8 @@ workflows:
version: 2
test-all:
jobs:
-- integration-postgres
+- integration-postgres:
+context: profile-postgres
- integration-redshift:
context: profile-redshift
requires:
139 changes: 139 additions & 0 deletions .github/workflows/ci.yml
@@ -0,0 +1,139 @@
# **what?**
# Run tests for dbt-utils against supported adapters

# **why?**
# To ensure that dbt-utils works as expected with all supported adapters

# **when?**
# On every PR, every push to main, and when manually triggered

name: Package Integration Tests

on:
push:
branches:
- main
pull_request:
workflow_dispatch:
inputs:
adapter:
description: The adapter to test against. Defaults to all supported adapters when blank.
type: string
required: false

env:
PYTHON_VERSION: "3.11"

jobs:
determine-supported-adapters:
runs-on: ubuntu-latest
outputs:
adapters: ${{ steps.supported-adapters.outputs.adapters }}
steps:
- name: "Checkout ${{ github.event.repository }}"
uses: actions/checkout@v4

- name: "Set up Python ${{ env.PYTHON_VERSION }}"
uses: actions/setup-python@v5
with:
python-version: ${{ env.PYTHON_VERSION }}

- name: "Install tox"
run: |
python -m pip install --upgrade pip
pip install tox
- name: "Get list of supported adapters or use input adapter only"
id: list-adapters
run: |
if [ -z "${{ inputs.adapter }}" ]; then
# github adds a pip freeze and a new line we need to strip out
source supported_adapters.env
echo $SUPPORTED_ADAPTERS
echo "test_adapters=$SUPPORTED_ADAPTERS" >> $GITHUB_OUTPUT
else
echo "test_adapters=${{ inputs.adapter }}" >> $GITHUB_OUTPUT
fi
- name: "Format adapter list for use as the matrix"
id: supported-adapters
run: |
# Convert to JSON array and output
supported_adapters=$(echo "${{ steps.list-adapters.outputs.test_adapters }}" | jq -Rc 'split(",")')
echo $supported_adapters
echo "adapters=$supported_adapters" >> $GITHUB_OUTPUT
- name: "[ANNOTATION] ${{ github.event.repository.name }} - Testing ${{ steps.supported-adapters.outputs.adapters }}"
run: |
title="${{ github.event.repository.name }} - adapters to test"
message="The workflow will run tests for the following adapters: ${{ steps.supported-adapters.outputs.adapters }}"
echo "::notice $title::$message"
run-tests:
runs-on: ubuntu-latest
needs: [determine-supported-adapters]
services:
postgres:
image: postgres
env:
POSTGRES_USER: ${{ vars.POSTGRES_USER }}
POSTGRES_PASSWORD: ${{ secrets.POSTGRES_PASS }}
POSTGRES_DB: ${{ vars.POSTGRES_DATABASE }}
POSTGRES_HOST: ${{ vars.POSTGRES_HOST }}
options: >-
--health-cmd pg_isready
--health-interval 10s
--health-timeout 5s
--health-retries 5
ports:
- 5432:5432
strategy:
fail-fast: false
matrix:
adapter: ${{fromJson(needs.determine-supported-adapters.outputs.adapters)}}

steps:
- name: "Checkout ${{ github.event.repository }} "
uses: actions/checkout@v4

- name: "Set up Python ${{ env.PYTHON_VERSION }}"
uses: actions/setup-python@v5
with:
python-version: ${{ env.PYTHON_VERSION }}

- name: "Install ${{ matrix.adapter }}"
run: |
python -m pip install --upgrade pip
pip install dbt-${{ matrix.adapter }}
- name: "Install tox"
run: |
python -m pip install --upgrade pip
pip install tox
- name: "Run integration tests with tox on ${{ matrix.adapter }}"
run: |
tox -e dbt_integration_${{ matrix.adapter }}
env:
POSTGRES_HOST: ${{ vars.POSTGRES_HOST }}
POSTGRES_USER: ${{ vars.POSTGRES_USER }}
DBT_ENV_SECRET_POSTGRES_PASS: ${{ secrets.POSTGRES_PASS }}
POSTGRES_PORT: 5432
POSTGRES_DATABASE: ${{ vars.POSTGRES_DATABASE }}
POSTGRES_SCHEMA: "dbt_utils_integration_tests_postgres_${{ github.run_number }}"
SNOWFLAKE_ACCOUNT: ${{ secrets.SNOWFLAKE_ACCOUNT }}
SNOWFLAKE_USER: ${{ vars.SNOWFLAKE_USER }}
DBT_ENV_SECRET_SNOWFLAKE_PASS: ${{ secrets.SNOWFLAKE_PASS }}
SNOWFLAKE_ROLE: ${{ vars.SNOWFLAKE_ROLE }}
SNOWFLAKE_DATABASE: ${{ vars.SNOWFLAKE_DATABASE }}
SNOWFLAKE_WAREHOUSE: ${{ vars.SNOWFLAKE_WAREHOUSE }}
SNOWFLAKE_SCHEMA: "dbt_utils_integration_tests_snowflake_${{ github.run_number }}"
REDSHIFT_HOST: ${{ vars.REDSHIFT_HOST }}
REDSHIFT_USER: ${{ vars.REDSHIFT_USER }}
DBT_ENV_SECRET_REDSHIFT_PASS: ${{ secrets.REDSHIFT_PASS }}
REDSHIFT_DATABASE: ${{ vars.REDSHIFT_DATABASE }}
REDSHIFT_SCHEMA: "dbt_utils_integration_tests_redshift_${{ github.run_number }}"
REDSHIFT_PORT: 5439
BIGQUERY_PROJECT: ${{ vars.BIGQUERY_PROJECT }}
BIGQUERY_KEYFILE_JSON: ${{ secrets.BIGQUERY_KEYFILE_JSON }}
BIGQUERY_SCHEMA: "dbt_utils_integration_tests_bigquery_${{ github.run_number }}"
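
The matrix above is derived from `supported_adapters.env`: the comma-separated list is split into a JSON array with `jq` so it can feed `strategy.matrix.adapter`. The same transformation can be sanity-checked locally:

```shell
# Reproduce the workflow's matrix-building step (illustrative only).
source supported_adapters.env   # sets SUPPORTED_ADAPTERS=postgres,snowflake,redshift,bigquery
echo "$SUPPORTED_ADAPTERS" | jq -Rc 'split(",")'
# prints: ["postgres","snowflake","redshift","bigquery"]
```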
15 changes: 13 additions & 2 deletions .gitignore
Expand Up @@ -4,6 +4,17 @@ dbt_modules/
dbt_packages/
logs/
venv/
env/
__pycache__
.tox/
/.pytest_cache/


# Ignore all directories that start with 'env-' and can have any name after
env*/

# Do not ignore .env files in any directory and do not ignore .env directories
!.env
!*/.env/

# But explicitly ignore test.env files
test.env
__pycache__
3 changes: 2 additions & 1 deletion Makefile
@@ -2,7 +2,8 @@

.PHONY: test
test: ## Run the integration tests.
-@./run_test.sh $(target)
+@\
+tox -e dbt_integration_$(target)

.PHONY: dev
dev: ## Installs dbt-* packages in develop mode along with development dependencies.
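
With this change, `make test` becomes a thin wrapper around the per-adapter tox environments:

```shell
# `target` selects the adapter; this expands to `tox -e dbt_integration_postgres`.
make test target=postgres
```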
1 change: 1 addition & 0 deletions dev-requirements.txt
@@ -7,3 +7,4 @@ dbt-redshift@git+https://github.com/dbt-labs/dbt-redshift.git
dbt-snowflake@git+https://github.com/dbt-labs/dbt-snowflake.git
dbt-bigquery@git+https://github.com/dbt-labs/dbt-bigquery.git
pytest-xdist
+tox>=3.13
5 changes: 3 additions & 2 deletions integration_tests/.env/bigquery.env
@@ -1,2 +1,3 @@
-BIGQUERY_SERVICE_KEY_PATH=
-BIGQUERY_TEST_DATABASE=
+BIGQUERY_KEYFILE_JSON=
+BIGQUERY_PROJECT=
+BIGQUERY_SCHEMA=dbt_utils_integration_tests_bigquery
11 changes: 6 additions & 5 deletions integration_tests/.env/postgres.env
@@ -1,5 +1,6 @@
-POSTGRES_TEST_HOST=localhost
-POSTGRES_TEST_USER=root
-POSTGRES_TEST_PASS=''
-POSTGRES_TEST_PORT=5432
-POSTGRES_TEST_DBNAME=circle_test
+POSTGRES_HOST=localhost
+POSTGRES_USER=root
+DBT_ENV_SECRET_POSTGRES_PASS=password
+POSTGRES_PORT=5432
+POSTGRES_DATABASE=dbt_utils_test
+POSTGRES_SCHEMA=dbt_utils_integration_tests_postgres
11 changes: 6 additions & 5 deletions integration_tests/.env/redshift.env
@@ -1,5 +1,6 @@
-REDSHIFT_TEST_HOST=
-REDSHIFT_TEST_USER=
-REDSHIFT_TEST_PASS=
-REDSHIFT_TEST_DBNAME=
-REDSHIFT_TEST_PORT=
+REDSHIFT_HOST=
+REDSHIFT_USER=
+DBT_ENV_SECRET_REDSHIFT_PASS=
+REDSHIFT_DATABASE=
+REDSHIFT_PORT=
+REDSHIFT_SCHEMA=dbt_utils_integration_tests_redshift
13 changes: 7 additions & 6 deletions integration_tests/.env/snowflake.env
@@ -1,6 +1,7 @@
-SNOWFLAKE_TEST_ACCOUNT=
-SNOWFLAKE_TEST_USER=
-SNOWFLAKE_TEST_PASSWORD=
-SNOWFLAKE_TEST_ROLE=
-SNOWFLAKE_TEST_DATABASE=
-SNOWFLAKE_TEST_WAREHOUSE=
+SNOWFLAKE_ACCOUNT=
+SNOWFLAKE_USER=
+DBT_ENV_SECRET_SNOWFLAKE_PASS=
+SNOWFLAKE_ROLE=
+SNOWFLAKE_DATABASE=
+SNOWFLAKE_WAREHOUSE=
+SNOWFLAKE_SCHEMA=dbt_utils_integration_tests_snowflake
12 changes: 1 addition & 11 deletions integration_tests/README.md
@@ -13,7 +13,7 @@
- Docker

### Configure credentials
-Edit the env file for your TARGET in `integration_tests/.env/[TARGET].env`.
+Edit the env file for your TARGET in `integration_tests/.env/[TARGET].env`. These will be used for your profiles.yml.

Load the environment variables:
```shell
@@ -91,16 +91,6 @@ Where possible, targets are being run in docker containers (this works for Postg

### Creating a new integration test

-#### Set up profiles
-Do either one of the following:
-1. Use `DBT_PROFILES_DIR`
-```shell
-cp integration_tests/ci/sample.profiles.yml integration_tests/profiles.yml
-export DBT_PROFILES_DIR=$(cd integration_tests && pwd)
-```
-2. Use `~/.dbt/profiles.yml`
-   - Copy contents from `integration_tests/ci/sample.profiles.yml` into `~/.dbt/profiles.yml`.

#### Add your integration test
This directory contains an example dbt project which tests the macros in the `dbt-utils` package. An integration test typically involves making 1) a new seed file 2) a new model file 3) a generic test to assert anticipated behaviour.

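
The README's "Load the environment variables" command is collapsed in this view. One pattern that works with these KEY=VALUE files (an assumption, not necessarily the project's documented command) is:

```shell
# Hypothetical: export every variable defined in a target's env file.
set -a
source integration_tests/.env/postgres.env
set +a
```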
45 changes: 0 additions & 45 deletions integration_tests/ci/sample.profiles.yml

This file was deleted.

48 changes: 48 additions & 0 deletions integration_tests/profiles.yml
@@ -0,0 +1,48 @@

# HEY! This file is used in the dbt-utils integration tests with CircleCI.
# You should __NEVER__ check credentials into version control. Thanks for reading :)


integration_tests:
target: postgres
outputs:
postgres:
type: "postgres"
host: "{{ env_var('POSTGRES_HOST') }}"
user: "{{ env_var('POSTGRES_USER') }}"
pass: "{{ env_var('DBT_ENV_SECRET_POSTGRES_PASS') }}"
port: "{{ env_var('POSTGRES_PORT') | as_number }}"
dbname: "{{ env_var('POSTGRES_DATABASE') }}"
schema: "{{ env_var('POSTGRES_SCHEMA') }}"
threads: 5

redshift:
type: "redshift"
host: "{{ env_var('REDSHIFT_HOST') }}"
user: "{{ env_var('REDSHIFT_USER') }}"
pass: "{{ env_var('DBT_ENV_SECRET_REDSHIFT_PASS') }}"
dbname: "{{ env_var('REDSHIFT_DATABASE') }}"
port: "{{ env_var('REDSHIFT_PORT') | as_number }}"
schema: "{{ env_var('REDSHIFT_SCHEMA') }}"
threads: 5

bigquery:
type: "bigquery"
method: "service-account-json"
project: "{{ env_var('BIGQUERY_PROJECT') }}"
dataset: "{{ env_var('BIGQUERY_SCHEMA') }}"
threads: 10
keyfile_json:
"{{ env_var('BIGQUERY_KEYFILE_JSON') | as_native }}"
job_retries: 3

snowflake:
type: "snowflake"
account: "{{ env_var('SNOWFLAKE_ACCOUNT') }}"
user: "{{ env_var('SNOWFLAKE_USER') }}"
password: "{{ env_var('DBT_ENV_SECRET_SNOWFLAKE_PASS') }}"
role: "{{ env_var('SNOWFLAKE_ROLE') }}"
database: "{{ env_var('SNOWFLAKE_DATABASE') }}"
warehouse: "{{ env_var('SNOWFLAKE_WAREHOUSE') }}"
schema: "{{ env_var('SNOWFLAKE_SCHEMA') }}"
threads: 10
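
With the profile now committed at `integration_tests/profiles.yml`, running against a target only needs the matching environment variables plus `DBT_PROFILES_DIR`; a minimal sketch mirroring what `run_test.sh` does after this commit:

```shell
# Point dbt at the committed profile and verify the postgres target connection.
cd integration_tests
export DBT_PROFILES_DIR=.
dbt debug --target postgres
```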
1 change: 0 additions & 1 deletion run_test.sh
@@ -8,7 +8,6 @@ dbt --version

# Set the profile
cd integration_tests
-cp ci/sample.profiles.yml profiles.yml
export DBT_PROFILES_DIR=.

# Show the location of the profiles directory and test the connection
1 change: 1 addition & 0 deletions supported_adapters.env
@@ -0,0 +1 @@
SUPPORTED_ADAPTERS=postgres,snowflake,redshift,bigquery
Empty file removed tests/__init__.py

0 comments on commit 4feda1c

Please sign in to comment.