Update to dbt-core 1.7 compatibility (#119) #318

Workflow file for this run

name: Run package tests for all dbs
on: [push, delete]
env:
  DBT_PROFILES_DIR: ${{ github.workspace }}/
  SNOWFLAKE_RE_DATA_TESTING_ACCOUNT: ${{ secrets.SNOWFLAKE_RE_DATA_TESTING_ACCOUNT }}
  RE_DATA_TESTING_USER: ${{ secrets.RE_DATA_TESTING_USER }}
  RE_DATA_TESTING_PASSWORD: ${{ secrets.RE_DATA_TESTING_PASSWORD }}
  REDSHIFT_RE_DATA_TESTING_HOST: ${{ secrets.REDSHIFT_RE_DATA_TESTING_HOST }}
  BIGQUERY_TESTING_TYPE: ${{ secrets.BIGQUERY_TESTING_TYPE }}
  BIGQUERY_TESTING_PROJECT_ID: ${{ secrets.BIGQUERY_TESTING_PROJECT_ID }}
  BIGQUERY_TESTING_PRIVATE_KEY_ID: ${{ secrets.BIGQUERY_TESTING_PRIVATE_KEY_ID }}
  BIGQUERY_TESTING_PRIVATE_KEY: ${{ secrets.BIGQUERY_TESTING_PRIVATE_KEY }}
  BIGQUERY_TESTING_CLIENT_EMAIL: ${{ secrets.BIGQUERY_TESTING_CLIENT_EMAIL }}
  BIGQUERY_TESTING_CLIENT_ID: ${{ secrets.BIGQUERY_TESTING_CLIENT_ID }}
  BIGQUERY_TESTING_AUTH_URI: ${{ secrets.BIGQUERY_TESTING_AUTH_URI }}
  BIGQUERY_TESTING_TOKEN_URI: ${{ secrets.BIGQUERY_TESTING_TOKEN_URI }}
  BIGQUERY_TESTING_AUTH_PROVIDER_X509_CERT_URL: ${{ secrets.BIGQUERY_TESTING_AUTH_PROVIDER_X509_CERT_URL }}
  BIGQUERY_TESTING_CLIENT_X509_CERT_URL: ${{ secrets.BIGQUERY_TESTING_CLIENT_X509_CERT_URL }}
  DBT_VERSION: 1.7
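
Note: DBT_PROFILES_DIR points dbt at a profiles.yml kept in the repository root, and the jobs below select connection profiles named re_data_<database>. That profiles.yml is not part of this run log; the snippet below is only a minimal sketch of how such a profile could read the Snowflake secrets exported above via dbt's env_var() function, with the database and warehouse names as placeholders.

re_data_snowflake:
  target: default
  outputs:
    default:
      type: snowflake
      # Credentials come from the environment variables defined in the env: block above.
      account: "{{ env_var('SNOWFLAKE_RE_DATA_TESTING_ACCOUNT') }}"
      user: "{{ env_var('RE_DATA_TESTING_USER') }}"
      password: "{{ env_var('RE_DATA_TESTING_PASSWORD') }}"
      database: RE_DATA_TESTING   # placeholder -- the real database name is project-specific
      warehouse: TESTING_WH       # placeholder
      schema: "{{ env_var('DQ_SCHEMA', 'dq') }}"
      threads: 4
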
jobs:
  test-other-dbs:
    runs-on: ubuntu-latest
    if: github.event_name == 'push' && github.repository == 're-data/dbt-re-data'
    strategy:
      fail-fast: false
      matrix:
        database: [snowflake, bigquery, redshift]
    steps:
      - name: Check out
        uses: actions/checkout@v2
      - uses: actions/setup-python@v4
        with:
          python-version: "3.8.x"
      - name: Inject slug/short variables
        uses: rlespinasse/github-slug-action@v3.x
      - name: Set the DQ_SCHEMA environment variable
        shell: bash
        run: |
          echo "DQ_SCHEMA=dq_${GITHUB_REF_SLUG//[^[:alnum:]]/_}" >> $GITHUB_ENV
      - name: Print DQ_SCHEMA
        run: |
          echo $DQ_SCHEMA
      - name: Install dependencies
        working-directory: ./integration_tests
        run: |
          pip install -r requirements.txt
          pip install dbt-${{ matrix.database }}==$DBT_VERSION
          dbt deps
      - name: Drop schemas
        working-directory: ./integration_tests
        run: |
          dbt run-operation drop_all_schemas --args "{ schema_name: ${{ env.DQ_SCHEMA }} }" --profile re_data_${{ matrix.database }} --vars "{ source_schema: ${{ env.DQ_SCHEMA }} }"
      - name: Create Schemas if needed
        if: matrix.database == 'redshift'
        working-directory: ./integration_tests
        run: |
          dbt run-operation create_required_schemas --args "{ schema_name: ${{ env.DQ_SCHEMA }} }" --profile re_data_${{ matrix.database }} --vars "{ source_schema: ${{ env.DQ_SCHEMA }} }"
      - name: Test DB
        working-directory: ./integration_tests/python_tests
        run: |
          pytest --db ${{ matrix.database }} --source_schema ${{ env.DQ_SCHEMA }}
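
Note: the Install dependencies step above runs dbt deps inside ./integration_tests, which installs whatever that project's packages.yml declares. The file itself is not shown in this run; a common layout for testing a dbt package from its own repository references the package by a local relative path, roughly as in the hypothetical sketch below.

packages:
  - local: ../   # the dbt-re-data package under test, one directory above integration_tests
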
  test-postgres:
    runs-on: ubuntu-latest
    if: github.event_name == 'push'
    services:
      postgres:
        image: postgres
        env:
          POSTGRES_PASSWORD: postgres
        # Set health checks to wait until postgres has started
        options: >-
          --health-cmd pg_isready
          --health-interval 10s
          --health-timeout 5s
          --health-retries 5
        ports:
          # Maps tcp port 5432 on service container to the host
          - 5432:5432
    steps:
      - name: Check out
        uses: actions/checkout@v2
      - uses: actions/setup-python@v4
        with:
          python-version: "3.8.x"
      - name: Install dependencies
        working-directory: ./integration_tests
        run: |
          pip install -r requirements.txt
          pip install dbt-postgres==$DBT_VERSION
          dbt deps
      - name: Test DB
        working-directory: ./integration_tests/python_tests
        run: pytest --db postgres --source_schema dq
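
Note: the test-postgres job connects to the Postgres service container published on localhost:5432 with the password configured above. A re_data_postgres profile compatible with that service would look roughly like the sketch below; host, port and password follow from the service definition, while user, dbname and schema are assumptions (the schema mirrors the --source_schema value passed to pytest).

re_data_postgres:
  target: default
  outputs:
    default:
      type: postgres
      host: localhost
      port: 5432
      user: postgres       # default superuser of the official postgres image
      password: postgres   # matches POSTGRES_PASSWORD in the service definition
      dbname: postgres     # assumption -- the image's default database
      schema: dq           # matches --source_schema dq in the Test DB step
      threads: 4
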
  clean-up-schemas:
    runs-on: ubuntu-latest
    if: github.event_name == 'delete' && github.repository == 're-data/dbt-re-data'
    strategy:
      fail-fast: false
      matrix:
        database: [snowflake, bigquery, redshift]
    steps:
      - name: Check out
        uses: actions/checkout@v2
      - uses: actions/setup-python@v4
        with:
          python-version: "3.8.x"
      - name: Inject slug/short variables
        uses: rlespinasse/github-slug-action@v3.x
      - name: Set the DQ_SCHEMA environment variable
        shell: bash
        run: |
          echo "DQ_SCHEMA=dq_${GITHUB_EVENT_REF_SLUG//[^[:alnum:]]/_}" >> $GITHUB_ENV
      - name: Print DQ_SCHEMA
        run: |
          echo $DQ_SCHEMA
      - name: Install dependencies and drop branch schema
        working-directory: ./integration_tests
        run: |
          pip install -r requirements.txt
          pip install dbt-${{ matrix.database }}==$DBT_VERSION
          dbt deps
          dbt run-operation drop_all_schemas --args "{ schema_name: ${{ env.DQ_SCHEMA }} }" --profile re_data_${{ matrix.database }} --vars "{ source_schema: ${{ env.DQ_SCHEMA }} }"