Skip to content

Commit

Permalink
Merge branch 'main' into er/add-bigquery-testing
Browse files
Browse the repository at this point in the history
  • Loading branch information
dbeatty10 committed Nov 22, 2024
2 parents d542ce1 + fdc998c commit 164103a
Show file tree
Hide file tree
Showing 7 changed files with 112 additions and 98 deletions.
171 changes: 101 additions & 70 deletions .circleci/config.yml
Original file line number Diff line number Diff line change
@@ -1,91 +1,113 @@
version: 2

jobs:
build:

integration-postgres:
docker:
- image: cimg/python:3.9.9
- image: circleci/postgres:9.6.5-alpine-ram
- image: cimg/python:3.11
- image: cimg/postgres:9.6
environment:
POSTGRES_USER: root
environment:
POSTGRES_HOST: localhost
POSTGRES_USER: root
DBT_ENV_SECRET_POSTGRES_PASS: ''
POSTGRES_PORT: 5432
POSTGRES_DATABASE: circle_test
POSTGRES_SCHEMA: codegen_integration_tests_postgres

steps:
- checkout

- run: pip install dbt-core dbt-postgres
- run:
name: Setup Environment Variables
command: echo 'export BIGQUERY_KEYFILE_JSON="$BIGQUERY_SERVICE_ACCOUNT_JSON"' >> "$BASH_ENV"
name: "Run Tests - Postgres"
command: |
cd integration_tests
dbt --warn-error deps --target postgres
dbt --warn-error run-operation create_source_table --target postgres
dbt --warn-error seed --target postgres --full-refresh
dbt --warn-error run --target postgres
dbt --warn-error test --target postgres
- store_artifacts:
path: integration_tests/logs
- store_artifacts:
path: integration_tests/target

- restore_cache:
key: deps1-{{ .Branch }}
# The resource_class feature allows configuring CPU and RAM resources for each job. Different resource classes are available for different executors. https://circleci.com/docs/2.0/configuration-reference/#resourceclass
resource_class: large

integration-redshift:
docker:
- image: cimg/python:3.11
steps:
- checkout
- run: pip install dbt-core dbt-redshift
- run:
name: "Setup dbt"
name: "Run Tests - Redshift"
command: |
python3 -m venv dbt_venv
. dbt_venv/bin/activate
python -m pip install --upgrade pip setuptools
python -m pip install --pre dbt-core dbt-postgres dbt-redshift dbt-snowflake dbt-bigquery
# - run:
# name: "Run Tests - Postgres"
# environment:
# POSTGRES_HOST: localhost
# POSTGRES_USER: root
# DBT_ENV_SECRET_POSTGRES_PASS: ""
# POSTGRES_PORT: 5432
# POSTGRES_DATABASE: circle_test
# POSTGRES_SCHEMA: codegen_integration_tests_postgres
# command: |
# . dbt_venv/bin/activate
# cd integration_tests
# dbt --warn-error deps --target postgres
# dbt --warn-error run-operation create_source_table --target postgres
# dbt --warn-error seed --target postgres --full-refresh
# dbt --warn-error run --target postgres
# dbt --warn-error test --target postgres

# - run:
# name: "Run Tests - Redshift"
# command: |
# . dbt_venv/bin/activate
# echo `pwd`
# cd integration_tests
# dbt --warn-error deps --target redshift
# dbt --warn-error run-operation create_source_table --target redshift
# dbt --warn-error seed --target redshift --full-refresh
# dbt --warn-error run --target redshift
# dbt --warn-error test --target redshift
cd integration_tests
dbt --warn-error deps --target redshift
dbt --warn-error run-operation create_source_table --target redshift
dbt --warn-error seed --target redshift --full-refresh
dbt --warn-error run --target redshift
dbt --warn-error test --target redshift
- store_artifacts:
path: integration_tests/logs
- store_artifacts:
path: integration_tests/target
# The resource_class feature allows configuring CPU and RAM resources for each job. Different resource classes are available for different executors. https://circleci.com/docs/2.0/configuration-reference/#resourceclass
resource_class: large

# - run:
# name: "Run Tests - Snowflake"
# command: |
# . dbt_venv/bin/activate
# echo `pwd`
# cd integration_tests
# dbt --warn-error deps --target snowflake
# dbt --warn-error run-operation create_source_table --target snowflake
# dbt --warn-error seed --target snowflake --full-refresh
# dbt --warn-error run --target snowflake
# dbt --warn-error test --target snowflake
integration-snowflake:
docker:
- image: cimg/python:3.11
steps:
- checkout
- run: pip install dbt-core dbt-snowflake
- run:
name: "Run Tests - Snowflake"
command: |
cd integration_tests
dbt --warn-error deps --target snowflake
dbt --warn-error run-operation create_source_table --target snowflake
dbt --warn-error seed --target snowflake --full-refresh
dbt --warn-error run --target snowflake
dbt --warn-error test --target snowflake
- store_artifacts:
path: integration_tests/logs
- store_artifacts:
path: integration_tests/target
# The resource_class feature allows configuring CPU and RAM resources for each job. Different resource classes are available for different executors. https://circleci.com/docs/2.0/configuration-reference/#resourceclass
resource_class: large

integration-bigquery:
environment:
BIGQUERY_SERVICE_KEY_PATH: "/home/circleci/bigquery-service-key.json"
docker:
- image: cimg/python:3.11
steps:
- checkout
- run: pip install dbt-core dbt-bigquery
- run:
name: Setup Environment Variables
command: |
echo $BIGQUERY_SERVICE_ACCOUNT_JSON > $BIGQUERY_SERVICE_KEY_PATH
echo 'export BIGQUERY_KEYFILE_JSON="$BIGQUERY_SERVICE_ACCOUNT_JSON"' >> "$BASH_ENV"
- run:
name: "Run Tests - BigQuery"
environment:
BIGQUERY_SERVICE_KEY_PATH: "/home/circleci/bigquery-service-key.json"

command: |
. dbt_venv/bin/activate
echo `pwd`
cd integration_tests
dbt --warn-error deps --target bigquery
dbt --warn-error run-operation create_source_table --target bigquery
dbt --warn-error seed --target bigquery --full-refresh
dbt --warn-error run --target bigquery
dbt --warn-error test --target bigquery
- save_cache:
key: deps1-{{ .Branch }}
paths:
- "dbt_venv"
- store_artifacts:
path: integration_tests/logs
- store_artifacts:
path: integration_tests/target
# The resource_class feature allows configuring CPU and RAM resources for each job. Different resource classes are available for different executors. https://circleci.com/docs/2.0/configuration-reference/#resourceclass
resource_class: large

- store_artifacts:
path: integration_tests/logs
Expand All
@@ -97,8 +119,17 @@ workflows:
version: 2
test-all:
jobs:
- build:
context:
- profile-redshift
- profile-snowflake
- profile-bigquery
- integration-postgres:
context: profile-postgres
- integration-redshift:
context: profile-redshift
requires:
- integration-postgres
- integration-snowflake:
context: profile-snowflake
requires:
- integration-postgres
- integration-bigquery:
context: profile-bigquery
requires:
- integration-postgres
2 changes: 1 addition & 1 deletion Makefile
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@ dev: ## Installs dbt-* packages in develop mode along with development dependenc
@\
echo "Install dbt-$(target)..."; \
python -m pip install --upgrade pip setuptools; \
python -m pip install --pre dbt-core "dbt-$(target)";
python -m pip install dbt-core "dbt-$(target)";

.PHONY: setup-db
setup-db: ## Setup Postgres database with docker-compose for system testing.
Expand Down
7 changes: 2 additions & 5 deletions integration_tests/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -89,20 +89,17 @@ Next, install `dbt-core` (and its dependencies) with:
```shell
make dev target=[postgres|redshift|...]
# or
python3 -m pip install --pre dbt-core dbt-[postgres|redshift|...]
python3 -m pip install dbt-core dbt-[postgres|redshift|...]
```

Or more specific:

```shell
make dev target=postgres
# or
python3 -m pip install --pre dbt-core dbt-postgres
python3 -m pip install dbt-core dbt-postgres
```

> [!NOTE]
> The `--pre` flag tells pip to install the latest pre-release version of whatever you pass to install. This ensures you're always using the latest version of dbt, so if your code interacts with dbt in a way that causes issues or test failures, we'll know about it ahead of a release.
Make sure to reload your virtual environment after installing the dependencies:

```shell
Expand Down
18 changes: 2 additions & 16 deletions integration_tests/macros/operations/create_source_table.sql
Original file line number Diff line number Diff line change
@@ -1,12 +1,5 @@
{% macro create_source_table() %}

{% if target.type == "redshift" %}
{% set disable_case_sensitive %}
reset enable_case_sensitive_identifier;
{% endset %}
{{ run_query(disable_case_sensitive) }}
{% endif %}

{% set target_schema=api.Relation.create(
database=target.database,
schema=target.schema ~ "__data_source_schema"
Expand Down Expand Up @@ -38,18 +31,11 @@ drop table if exists {{ target_schema }}.codegen_integration_tests__data_source_

{{ run_query(drop_table_sql_case_sensitive) }}

{% if target.type == "redshift" %}
{% set enable_case_sensitive %}
set enable_case_sensitive_identifier to true;
{% endset %}
{{ run_query(enable_case_sensitive) }}
{% endif %}

{% set create_table_sql_case_sensitive %}
create table {{ target_schema }}.codegen_integration_tests__data_source_table_case_sensitive as (
select
1 as {% if target.type == "bigquery" %}My_Integer_Col{% else %}"My_Integer_Col"{% endif %},
true as {% if target.type == "bigquery" %}My_Bool_Col{% else %}"My_Bool_Col"{% endif %}
1 as {{ adapter.quote("My_Integer_Col") }},
true as {{ adapter.quote("My_Bool_Col") }}
)
{% endset %}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -20,8 +20,8 @@ with source as (
renamed as (

select
{% if target.type == "bigquery" %}My_Integer_Col{% else %}"My_Integer_Col"{% endif %}
, {% if target.type == "bigquery" %}My_Bool_Col{% else %}"My_Bool_Col"{% endif %}
{{ adapter.quote("My_Integer_Col") }}
, {{ adapter.quote("My_Bool_Col") }}

from source

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -16,8 +16,8 @@ with source as (
renamed as (

select
{% if target.type == "bigquery" %}My_Integer_Col{% else %}"My_Integer_Col"{% endif %},
{% if target.type == "bigquery" %}My_Bool_Col{% else %}"My_Bool_Col"{% endif %}
{{ adapter.quote("My_Integer_Col") }},
{{ adapter.quote("My_Bool_Col") }}

from source

Expand Down
4 changes: 2 additions & 2 deletions macros/generate_base_model.sql
Original file line number Diff line number Diff line change
Expand Up @@ -25,11 +25,11 @@ renamed as (
select
{%- if leading_commas -%}
{%- for column in column_names %}
{{", " if not loop.first}}{% if not case_sensitive_cols %}{{ column | lower }}{% elif target.type == "bigquery" %}{{ column }}{% else %}{{ "\"" ~ column ~ "\"" }}{% endif %}
{{", " if not loop.first}}{% if not case_sensitive_cols %}{{ column | lower }}{% else %}{{ adapter.quote(column) }}{% endif %}
{%- endfor %}
{%- else -%}
{%- for column in column_names %}
{% if not case_sensitive_cols %}{{ column | lower }}{% elif target.type == "bigquery" %}{{ column }}{% else %}{{ "\"" ~ column ~ "\"" }}{% endif %}{{"," if not loop.last}}
{% if not case_sensitive_cols %}{{ column | lower }}{% else %}{{ adapter.quote(column) }}{% endif %}{{"," if not loop.last}}
{%- endfor -%}
{%- endif %}

Expand Down

0 comments on commit 164103a

Please sign in to comment.