dbt_netsuite v0.13.0 release (#117)
* feature/performance-improvements
fivetran-catfritz committed Apr 30, 2024
1 parent d9138f6 commit d16bb4a
Showing 22 changed files with 549 additions and 246 deletions.
4 changes: 3 additions & 1 deletion .buildkite/hooks/pre-command
@@ -22,4 +22,6 @@ export CI_SNOWFLAKE_DBT_WAREHOUSE=$(gcloud secrets versions access latest --secr
export CI_DATABRICKS_DBT_HOST=$(gcloud secrets versions access latest --secret="CI_DATABRICKS_DBT_HOST" --project="dbt-package-testing-363917")
export CI_DATABRICKS_DBT_HTTP_PATH=$(gcloud secrets versions access latest --secret="CI_DATABRICKS_DBT_HTTP_PATH" --project="dbt-package-testing-363917")
export CI_DATABRICKS_DBT_TOKEN=$(gcloud secrets versions access latest --secret="CI_DATABRICKS_DBT_TOKEN" --project="dbt-package-testing-363917")
export CI_DATABRICKS_DBT_CATALOG=$(gcloud secrets versions access latest --secret="CI_DATABRICKS_DBT_CATALOG" --project="dbt-package-testing-363917")
export CI_DATABRICKS_SQL_DBT_HTTP_PATH=$(gcloud secrets versions access latest --secret="CI_DATABRICKS_SQL_DBT_HTTP_PATH" --project="dbt-package-testing-363917")
export CI_DATABRICKS_SQL_DBT_TOKEN=$(gcloud secrets versions access latest --secret="CI_DATABRICKS_SQL_DBT_TOKEN" --project="dbt-package-testing-363917")
17 changes: 16 additions & 1 deletion .buildkite/pipeline.yml
@@ -71,4 +71,19 @@ steps:
- "CI_DATABRICKS_DBT_TOKEN"
- "CI_DATABRICKS_DBT_CATALOG"
commands: |
bash .buildkite/scripts/run_models.sh databricks
- label: ":databricks: :database: Run Tests - Databricks SQL Warehouse"
key: "run_dbt_databricks_sql"
plugins:
- docker#v3.13.0:
image: "python:3.8"
shell: [ "/bin/bash", "-e", "-c" ]
environment:
- "BASH_ENV=/tmp/.bashrc"
- "CI_DATABRICKS_DBT_HOST"
- "CI_DATABRICKS_SQL_DBT_HTTP_PATH"
- "CI_DATABRICKS_SQL_DBT_TOKEN"
- "CI_DATABRICKS_DBT_CATALOG"
commands: |
bash .buildkite/scripts/run_models.sh databricks-sql
22 changes: 22 additions & 0 deletions .buildkite/scripts/run_models.sh
@@ -17,11 +17,33 @@ db=$1
echo `pwd`
cd integration_tests
dbt deps

if [ "$db" = "databricks-sql" ]; then
dbt seed --vars '{netsuite_schema: netsuite_integrations_tests_sqlw}' --target "$db" --full-refresh
dbt compile --vars '{netsuite_schema: netsuite_integrations_tests_sqlw}' --target "$db"
dbt run --vars '{netsuite_schema: netsuite_integrations_tests_sqlw}' --target "$db" --full-refresh
dbt run --vars '{netsuite_schema: netsuite_integrations_tests_sqlw}' --target "$db"
dbt test --vars '{netsuite_schema: netsuite_integrations_tests_sqlw}' --target "$db"
dbt run --vars '{netsuite_schema: netsuite_integrations_tests_sqlw, netsuite2__using_to_subsidiary: true, netsuite2__multibook_accounting_enabled: true, netsuite2__using_exchange_rate: false, netsuite2__using_vendor_categories: false, netsuite2__using_jobs: false}' --target "$db" --full-refresh
dbt run --vars '{netsuite_schema: netsuite_integrations_tests_sqlw, netsuite2__using_to_subsidiary: true, netsuite2__multibook_accounting_enabled: true, netsuite2__using_exchange_rate: false, netsuite2__using_vendor_categories: false, netsuite2__using_jobs: false}' --target "$db"
dbt test --vars '{netsuite_schema: netsuite_integrations_tests_sqlw}' --target "$db"
dbt run --vars '{netsuite_schema: netsuite_integrations_tests_sqlw, netsuite2__using_to_subsidiary: true, netsuite2__using_exchange_rate: true}' --target "$db" --full-refresh
dbt run --vars '{netsuite_schema: netsuite_integrations_tests_sqlw, netsuite2__using_to_subsidiary: true, netsuite2__using_exchange_rate: true}' --target "$db"
dbt test --vars '{netsuite_schema: netsuite_integrations_tests_sqlw}' --target "$db"

else

dbt seed --target "$db" --full-refresh
dbt compile --target "$db"
dbt run --target "$db" --full-refresh
dbt run --target "$db"
dbt test --target "$db"
dbt run --vars '{netsuite2__using_to_subsidiary: true, netsuite2__multibook_accounting_enabled: true, netsuite2__using_exchange_rate: false, netsuite2__using_vendor_categories: false, netsuite2__using_jobs: false}' --target "$db" --full-refresh
dbt run --vars '{netsuite2__using_to_subsidiary: true, netsuite2__multibook_accounting_enabled: true, netsuite2__using_exchange_rate: false, netsuite2__using_vendor_categories: false, netsuite2__using_jobs: false}' --target "$db"
dbt test --target "$db"
dbt run --vars '{netsuite2__using_to_subsidiary: true, netsuite2__using_exchange_rate: true}' --target "$db" --full-refresh
dbt run --vars '{netsuite2__using_to_subsidiary: true, netsuite2__using_exchange_rate: true}' --target "$db"
dbt test --target "$db"
fi

dbt run-operation fivetran_utils.drop_schemas_automation --target "$db"
13 changes: 13 additions & 0 deletions .github/workflows/auto-release.yml
@@ -0,0 +1,13 @@
name: 'auto release'
on:
pull_request:
types:
- closed
branches:
- main

jobs:
call-workflow-passing-data:
if: github.event.pull_request.merged
uses: fivetran/dbt_package_automations/.github/workflows/auto-release.yml@main
secrets: inherit
48 changes: 48 additions & 0 deletions CHANGELOG.md
@@ -1,3 +1,51 @@
# dbt_netsuite v0.13.0

For Netsuite2, [PR #116](https://github.com/fivetran/dbt_netsuite/pull/116) includes the following updates:

## 🚨 Breaking Changes 🚨
> ⚠️ Since the following changes are breaking, a `--full-refresh` after upgrading will be required.
- Performance improvements:
- Snowflake, Postgres, and Redshift destinations:
- Added an incremental strategy for the following models:
  - `int_netsuite2__tran_with_converted_amounts`
  - `netsuite2__balance_sheet`
  - `netsuite2__income_statement`
  - `netsuite2__transaction_details`
- Bigquery and Databricks destinations:
- Due to the variation in pricing and runtime priorities across customers, we chose by default to materialize these models as tables rather than incrementally for Bigquery and Databricks. For more information on this decision, see the [Incremental Strategy section](https://github.com/fivetran/dbt_netsuite/blob/main/DECISIONLOG.md#incremental-strategy) of the DECISIONLOG.
- To enable incremental materialization for these destinations, see the [Incremental Materialization section](https://github.com/fivetran/dbt_netsuite/blob/main/README.md#-adding-incremental-materialization-for-bigquery-and-databricks) of the README for instructions.

- To reduce storage, updated the default materialization of the upstream staging models to views. (See the [dbt_netsuite_source CHANGELOG](https://github.com/fivetran/dbt_netsuite_source/blob/main/CHANGELOG.md#dbt_netsuite_source-v0100) for more details.)

## 🎉 Features
- Added a default 3-day look-back to incremental models to accommodate late-arriving records, based on the `_fivetran_synced_date` of transaction records. The number of days can be changed by setting the var `lookback_window` in your dbt_project.yml. See the [Lookback Window section of the README](https://github.com/fivetran/dbt_netsuite/blob/main/README.md#lookback-window) for more details.
- Added macro `netsuite_lookback` to streamline the lookback calculation.
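For example, the lookback could be lengthened to 7 days in `dbt_project.yml` (the value shown is illustrative; the variable name matches the Lookback Window section of the README):

```yml
vars:
  netsuite:
    lookback_window: 7 # days to look back on incremental runs; package default is 3
```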

## Under the Hood:
- Added integration testing pipeline for Databricks SQL Warehouse.
- Included auto-releaser GitHub Actions workflow to automate future releases.

For Netsuite2, [PR #114](https://github.com/fivetran/dbt_netsuite/pull/114) includes the following updates:

## Features
- Added the following columns to model `netsuite2__transaction_details`:
  - department_id
  - entity_id
  - is_closed
  - is_main_line
  - is_tax_line
  - item_id
  - transaction_number
- ❗Note: If you have already added any of these fields as passthrough columns via the `transactions_pass_through_columns`, `transaction_lines_pass_through_columns`, `accounts_pass_through_columns`, or `departments_pass_through_columns` vars, you will need to remove those fields from the var or alias them to avoid duplicate column errors (see the sketch following this list).

- Removed the unnecessary reference to `entities` in the `netsuite2__transaction_details` model.
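As a hedged sketch only (the passthrough format and the names below are assumptions; consult the package README for the exact syntax), an affected passthrough entry could be aliased rather than removed:

```yml
vars:
  transaction_lines_pass_through_columns:
    - name: "department_id"         # hypothetical entry that now collides with a packaged column
      alias: "department_id_custom" # rename it to avoid a duplicate column error
```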

## 📝 Documentation Update 📝
- [Updated DECISIONLOG](https://github.com/fivetran/dbt_netsuite/blob/main/DECISIONLOG.md#why-converted-transaction-amounts-are-null-if-they-are-non-posting) with our reasoning for why we don't bring in future-facing transactions and leave the `converted_amount` in transaction details empty. ([#115](https://github.com/fivetran/dbt_netsuite/issues/115))

## Contributors:
- [@FrankTub](https://github.com/FrankTub) ([#114](https://github.com/fivetran/dbt_netsuite/issues/114))

# dbt_netsuite v0.12.0
## 🎁 Official release for Netsuite2! 🎁
[PR #98](https://github.com/fivetran/dbt_netsuite/pull/98) is the official supported release of [dbt_netsuite v0.12.0-b1](https://github.com/fivetran/dbt_netsuite/releases/tag/v0.12.0-b1).
25 changes: 24 additions & 1 deletion DECISIONLOG.md
@@ -8,4 +8,27 @@ In the `int_netsuite__transactions_with_converted_amounts` model, account types

## Creation of 'Other' Account Category for Non Posting and Statistical Account Types

As mentioned above, in the `int_netsuite__transactions_with_converted_amounts`/`int_netsuite2__tran_with_converted_amounts` models, account types are bucketed into broader account _categories_. There is no standard category for `Non Posting` and `Statistical` account types, and transactions from these kinds of accounts are excluded from all financial reports. However, they are used for other workflows in Netsuite, so we have bucketed `Non Posting` and `Statistical` account types into a new `Other` account category.

## Why converted transaction amounts are null if they are non-posting

In our `intermediate` Netsuite models, we translate amounts from posted transactions into their proper `converted_amount` values based on the exchange rates in the reporting and transaction periods. That way, customers will always have accurate `converted_amount` data that can help them validate their financial reporting.

For the sake of financial fidelity, we decided not to convert non-posting amounts because their exchange rates are subject to change. While such conversions could provide additional value for customers looking to do financial forecasting, we do not want to create confusion by bringing in converted amounts that may still change and thereby disrupt existing financial reporting processes.

For customers interested in creating future-facing `converted_amount` values, our recommendation is to materialize the intermediate models as views or tables so the exchange rate data is available in your warehouse, then leverage the `transaction_amount` in these particular cases to produce the future converted amounts. You can update the models section of your `dbt_project.yml` to change the materialization.

```yml
models:
netsuite:
netsuite2:
intermediate:
+materialized: [table or view]
```

## Incremental Strategy Selection

For incremental models, we have chosen the `delete+insert` strategy for PostgreSQL, Redshift, and Snowflake destinations.

For Bigquery and Databricks, we have turned off the incremental strategy by default, since we did not want to cause unexpected warehouse costs for users. If you choose to enable incremental materialization for these destinations, we have set it up to use the `merge` strategy. For instructions on how to enable the incremental strategy, see the [README](https://github.com/fivetran/dbt_netsuite?tab=readme-ov-file#adding-incremental-materialization-for-bigquery-and-databricks).

These strategies were selected because transaction records can be updated retroactively; both `merge` and `delete+insert` rely on a unique key to identify which records to update or replace.
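As a hedged illustration of the strategy pairing described above (not the package's literal source — the unique key name and config shape are assumptions, and the package actually defaults these models to tables on Bigquery and Databricks), an adapter-dependent model config might look like:

```sql
{{ config(
    materialized='incremental',
    unique_key='transaction_details_id',  -- assumed surrogate key name
    incremental_strategy='merge' if target.type in ('bigquery', 'databricks') else 'delete+insert'
) }}
```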
34 changes: 32 additions & 2 deletions README.md
@@ -101,7 +101,7 @@ Include the following netsuite package version in your `packages.yml` file:
```yaml
packages:
- package: fivetran/netsuite
version: [">=0.12.0", "<0.13.0"]
version: [">=0.13.0", "<0.14.0"]
```
## Step 3: Define Netsuite.com or Netsuite2 Source
As of April 2022, Fivetran made available a new Netsuite connector which leverages the Netsuite2 endpoint as opposed to the original Netsuite.com endpoint. This package is designed to run on one or the other, not both. By default, the `netsuite_data_model` variable for this package is set to the original `netsuite` value, which runs the Netsuite.com version of the package. If you would like to run the package on Netsuite2 data, adjust the `netsuite_data_model` variable to run the `netsuite2` version of the package.
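For example, the Netsuite2 models can be enabled with a single variable in `dbt_project.yml` (a minimal sketch of the switch described above):

```yml
vars:
  netsuite_data_model: netsuite2 # default is netsuite (the Netsuite.com endpoint)
```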
@@ -232,6 +232,36 @@ vars:
### Override the data models variable
This package is designed to run **either** the Netsuite.com or Netsuite2 data models. However, for documentation purposes, an additional variable `netsuite_data_model_override` was created to allow for both data model types to be run at the same time by setting the variable value to `netsuite`. This is only to ensure the [dbt docs](https://fivetran.github.io/dbt_netsuite/) (which is hosted on this repository) is generated for both model types. While this variable is provided, we recommend you do not adjust the variable and instead change the `netsuite_data_model` variable to fit your configuration needs.

### Lookback Window
Records from the source can sometimes arrive late. Since several of the models in this package are incremental, by default we look back 3 days from the `_fivetran_synced_date` of transaction records to ensure late-arriving records are captured, which avoids the need for frequent full refreshes. We still recommend running `dbt run --full-refresh` periodically to maintain the data quality of the models.

To change the default lookback window, add the following variable to your `dbt_project.yml` file:

```yml
vars:
netsuite:
lookback_window: number_of_days # default is 3
```

### Adding incremental materialization for Bigquery and Databricks
Since pricing and runtime priorities vary by customer, by default we chose to materialize the models below as tables rather than incrementally for Bigquery and Databricks. For more information on this decision, see the [Incremental Strategy section](https://github.com/fivetran/dbt_netsuite/blob/main/DECISIONLOG.md#incremental-strategy) of the DECISIONLOG.

If you wish to enable incremental materializations leveraging the `merge` strategy, you can add the below materialization settings to your `dbt_project.yml` file. You only need to add lines for the specific model materializations you wish to change.
```yml
models:
netsuite:
netsuite2:
netsuite2__income_statement:
+materialized: incremental # default is table for Bigquery and Databricks
netsuite2__transaction_details:
+materialized: incremental # default is table for Bigquery and Databricks
netsuite2__balance_sheet:
+materialized: incremental # default is table for Bigquery and Databricks
intermediate:
int_netsuite2__tran_with_converted_amounts:
+materialized: incremental # default is ephemeral for Bigquery and Databricks
```

## (Optional) Step 7: Produce Analytics-Ready Reports with Streamlit App (Bigquery and Snowflake users only)
For those who want to take their reports a step further, our team has created the [Fivetran Netsuite Streamlit App](https://fivetran-netsuite.streamlit.app/) to generate end model visualizations based off of the reports we created in this package. This way you can replicate much of the reporting you see internally in Netsuite and automate a lot of the work needed to report on your core metrics.

@@ -252,7 +282,7 @@ This dbt package is dependent on the following dbt packages. Please be aware tha
```yml
packages:
- package: fivetran/netsuite_source
version: [">=0.9.0", "<0.10.0"]
version: [">=0.10.0", "<0.11.0"]
- package: fivetran/fivetran_utils
version: [">=0.4.0", "<0.5.0"]
```
2 changes: 1 addition & 1 deletion dbt_project.yml
@@ -1,6 +1,6 @@
config-version: 2
name: 'netsuite'
version: '0.12.0'
version: '0.13.0'
require-dbt-version: [">=1.3.0", "<2.0.0"]

models:
2 changes: 1 addition & 1 deletion docs/catalog.json

Large diffs are not rendered by default.

4 changes: 2 additions & 2 deletions docs/index.html

Large diffs are not rendered by default.

2 changes: 1 addition & 1 deletion docs/manifest.json

Large diffs are not rendered by default.

2 changes: 1 addition & 1 deletion docs/run_results.json

Large diffs are not rendered by default.

8 changes: 8 additions & 0 deletions integration_tests/ci/sample.profiles.yml
@@ -51,4 +51,12 @@ integration_tests:
schema: netsuite_integration_tests_5
threads: 2
token: "{{ env_var('CI_DATABRICKS_DBT_TOKEN') }}"
type: databricks
databricks-sql:
catalog: "{{ env_var('CI_DATABRICKS_DBT_CATALOG') }}"
host: "{{ env_var('CI_DATABRICKS_DBT_HOST') }}"
http_path: "{{ env_var('CI_DATABRICKS_SQL_DBT_HTTP_PATH') }}"
schema: netsuite_integrations_tests_sqlw
threads: 8
token: "{{ env_var('CI_DATABRICKS_SQL_DBT_TOKEN') }}"
type: databricks
6 changes: 4 additions & 2 deletions integration_tests/dbt_project.yml
@@ -1,9 +1,11 @@
name: 'netsuite_integration_tests'
version: '0.12.0'
version: '0.13.0'
profile: 'integration_tests'
config-version: 2

models:
+materialized: table
+schema: "{{ 'netsuite_integrations_tests_sqlw' if target.name == 'databricks-sql' else 'netsuite' }}"

vars:
netsuite_schema: netsuite_integration_tests_5
netsuite_data_model_override: netsuite
18 changes: 18 additions & 0 deletions macros/netsuite_lookback.sql
@@ -0,0 +1,18 @@
{% macro netsuite_lookback(from_date, datepart, interval, safety_date='2010-01-01') %}

{{ adapter.dispatch('netsuite_lookback', 'netsuite') (from_date, datepart, interval, safety_date='2010-01-01') }}

{%- endmacro %}

{% macro default__netsuite_lookback(from_date, datepart, interval, safety_date='2010-01-01') %}

{% set sql_statement %}
select coalesce({{ from_date }}, {{ "'" ~ safety_date ~ "'" }})
from {{ this }}
{%- endset -%}

{%- set result = dbt_utils.get_single_value(sql_statement) %}

{{ dbt.dateadd(datepart=datepart, interval=-interval, from_date_or_timestamp="cast('" ~ result ~ "' as date)") }}

{% endmacro %}
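A hedged sketch of how this macro might be applied inside an incremental model's filter (the surrounding model, column reference, and variable default are assumptions for illustration; the package's actual usage may differ):

```sql
{% if is_incremental() %}
-- Re-scan a window of recently synced rows: take the max previously loaded sync date
-- from this model, subtract the lookback interval, and filter against that boundary.
where _fivetran_synced_date >= {{ netsuite.netsuite_lookback(
    from_date='max(_fivetran_synced_date)',
    datepart='day',
    interval=var('lookback_window', 3)
) }}
{% endif %}
```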
23 changes: 21 additions & 2 deletions models/netsuite2.yml
@@ -106,6 +106,8 @@ models:
tests:
- unique
- not_null
- name: _fivetran_synced_date
description: _fivetran_synced formatted as a date for incremental logic use.

- name: netsuite2__income_statement
description: >
@@ -176,7 +178,8 @@
tests:
- unique
- not_null

- name: _fivetran_synced_date
description: _fivetran_synced formatted as a date for incremental logic use.

- name: netsuite2__transaction_details
description: >
@@ -303,4 +306,20 @@
description: Surrogate key hashed on `transaction_line_id` and `transaction_id`. Adds `to_subsidiary_id` if using subsidiaries and `accounting_book_id` if using multibook accounting.
tests:
- unique
- not_null
- name: _fivetran_synced_date
description: _fivetran_synced formatted as a date for incremental logic use.
- name: department_id
description: "{{ doc('department_id') }}"
- name: entity_id
description: "{{ doc('entity_id') }}"
- name: is_closed
description: Boolean indicating if the accounting period is closed.
- name: is_main_line
description: Boolean indicating if the transaction line is a main line entry.
- name: is_tax_line
description: Boolean indicating if the transaction line is a tax line.
- name: item_id
description: "{{ doc('item_id') }}"
- name: transaction_number
description: The Netsuite generated number of the transaction.
@@ -23,7 +23,8 @@ transaction_lines_w_accounting_period as ( -- transaction line totals, by accoun
{% endif %}

transactions.accounting_period_id as transaction_accounting_period_id,
coalesce(transaction_lines.amount, 0) as unconverted_amount
coalesce(transaction_lines.amount, 0) as unconverted_amount,
transactions._fivetran_synced_date
from transaction_lines

join transactions on transactions.transaction_id = transaction_lines.transaction_id